Compare commits
1 Commits
gitpkgs...wip_manual

| Author | SHA256 | Date |
|---|---|---|
| | 51b3d5942d | |
@@ -1,34 +0,0 @@
name: go-generate-check
on:
  push:
    branches: ['main']
    paths:
      - '**.go'
      - '**.mod'
      - '**.sum'
  pull_request:
    paths:
      - '**.go'
      - '**.mod'
      - '**.sum'
  workflow_dispatch:
jobs:
  go-generate-check:
    name: go-generate-check
    container:
      image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
    steps:
      - run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
      - run: git fetch origin ${{ gitea.ref }}
      - run: git checkout FETCH_HEAD
      - run: go generate -C common
      - run: go generate -C workflow-pr
      - run: go generate -C workflow-pr/interfaces
      - run: git add -N .; git diff
      - run: |
          status=$(git status --short)
          if [[ -n "$status" ]]; then
            echo -e "$status"
            echo "Please commit the differences from running: go generate"
            false
          fi
@@ -1,25 +0,0 @@
name: go-generate-push
on:
  workflow_dispatch:
jobs:
  go-generate-push:
    name: go-generate-push
    container:
      image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
    steps:
      - run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
      - run: git fetch origin ${{ gitea.ref }}
      - run: git checkout FETCH_HEAD
      - run: go generate -C common
      - run: go generate -C workflow-pr
      - run: go generate -C workflow-pr/interfaces
      - run: |
          host=${{ gitea.server_url }}
          host=${host#https://}
          echo $host
          git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
          git config user.name "Gitea Actions"
          git config user.email "gitea_noreply@opensuse.org"
      - run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go generate"; git push origin HEAD:${{ gitea.ref }} )'
      - run: git log -p FETCH_HEAD...HEAD
      - run: git log --numstat FETCH_HEAD...HEAD
@@ -1,33 +0,0 @@
name: go-vendor-check
on:
  push:
    branches: ['main']
    paths:
      - '**.mod'
      - '**.sum'
  pull_request:
    paths:
      - '**.mod'
      - '**.sum'
  workflow_dispatch:
jobs:
  go-generate-check:
    name: go-vendor-check
    container:
      image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
    steps:
      - run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
      - run: git fetch origin ${{ gitea.ref }}
      - run: git checkout FETCH_HEAD
      - run: go mod download
      - run: go mod vendor
      - run: go mod verify
      - run: git add -N .; git diff
      - run: go mod tidy -diff || true
      - run: |
          status=$(git status --short)
          if [[ -n "$status" ]]; then
            echo -e "$status"
            echo "Please commit the differences from running: go generate"
            false
          fi
@@ -1,26 +0,0 @@
name: go-generate-push
on:
  workflow_dispatch:
jobs:
  go-generate-push:
    name: go-generate-push
    container:
      image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
    steps:
      - run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
      - run: git fetch origin ${{ gitea.ref }}
      - run: git checkout FETCH_HEAD
      - run: go mod download
      - run: go mod vendor
      - run: go mod verify
      - run: |
          host=${{ gitea.server_url }}
          host=${host#https://}
          echo $host
          git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
          git config user.name "Gitea Actions"
          git config user.email "gitea_noreply@opensuse.org"
      - run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go mod vendor"; git push origin HEAD:${{ gitea.ref }} )'
      - run: go mod tidy -diff || true
      - run: git log -p FETCH_HEAD...HEAD
      - run: git log --numstat FETCH_HEAD...HEAD
4 .gitignore vendored
@@ -1,2 +1,6 @@
mock
node_modules
*.obscpio
autogits-tmp.tar.zst
*.osc
*.conf
12 README.md
@@ -23,18 +23,18 @@ The bots that drive Git Workflow for package management
Bugs
----

Report bugs to issue tracker at https://src.opensuse.org/git-workflow/autogits
Report bugs to issue tracker at https://src.opensuse.org/adamm/autogits


Build Status
------------

Devel project build status (`main` branch):
main branch build status:

![Build status of obs-staging-bot in openSUSE:Tools project](https://build.opensuse.org/projects/openSUSE:Tools/packages/obs-staging-bot/badge.svg?type=default&filename=factory/15.6)

Devel project build status:

![Build status of obs-staging-bot in devel project](https://build.opensuse.org/projects/home:adamm:autogits/packages/obs-staging-bot/badge.svg?type=default&filename=Factory)

`staging` branch build status:

![Build status of workflow-pr in devel project](https://build.opensuse.org/projects/home:adamm:autogits/packages/workflow-pr/badge.svg?type=default&filename=staging)
15 _service Normal file
@@ -0,0 +1,15 @@
<services>
  <!-- workaround, go_modules needs a tar and obs_scm doesn't take file://. -->
  <service name="roast" mode="manual">
    <param name="target">.</param>
    <param name="reproducible">true</param>
    <param name="outfile">autogits-tmp.tar.zst</param>
    <param name="exclude">autogits-tmp.tar.zst</param>
  </service>
  <service name="go_modules" mode="manual">
    <param name="basename">./</param>
    <param name="compression">zst</param>
    <param name="vendorname">vendor</param>
  </service>
</services>
124 autogits.spec
@@ -17,12 +17,11 @@
Name: autogits
Version: 1
Version: 0
Release: 0
Summary: GitWorkflow utilities
License: GPL-2.0-or-later
URL: https://src.opensuse.org/adamm/autogits
BuildRequires: git
BuildRequires: systemd-rpm-macros
BuildRequires: go
%{?systemd_ordering}

@@ -31,21 +30,13 @@ BuildRequires: go
Git Workflow tooling and utilities enabling automated handing of OBS projects
as git repositories

%package utils
Summary: HuJSON to JSON parser
Provides: hujson
Provides: /usr/bin/hujson

%package devel-importer
Summary: Imports devel projects from obs to git

%description -n autogits-devel-importer
Command-line tool to import devel projects from obs to git


%package doc
Summary: Common documentation files
BuildArch: noarch

%description -n autogits-doc
Common documentation files

%description utils
HuJSON to JSON parser, using stdin -> stdout pipe

%package gitea-events-rabbitmq-publisher
Summary: Publishes Gitea webhook data via RabbitMQ

@@ -56,12 +47,12 @@ with a topic
<scope>.src.$organization.$webhook_type.[$webhook_action_type]


%package gitea-status-proxy
Summary: Proxy for setting commit status in Gitea
%package doc
Summary: Common documentation files

%description doc
Common documentation files

%description gitea-status-proxy
Setting commit status requires code write access token. This proxy
is middleware that delegates status setting without access to other APIs

%package group-review
Summary: Reviews of groups defined in ProjectGit

@@ -71,12 +62,6 @@ Is used to handle reviews associated with groups defined in the
ProjectGit.


%package obs-forward-bot
Summary: obs-forward-bot

%description obs-forward-bot


%package obs-staging-bot
Summary: Build a PR against a ProjectGit, if review is requested

@@ -91,19 +76,8 @@ Summary: Reports build status of OBS service as an easily to produce SVG
Reports build status of OBS service as an easily to produce SVG


%package utils
Summary: HuJSON to JSON parser
Provides: hujson
Provides: /usr/bin/hujson

%description utils
HuJSON to JSON parser, using stdin -> stdout pipe


%package workflow-direct
Summary: Keep ProjectGit in sync for a devel project
Requires: openssh-clients
Requires: git-core

%description workflow-direct
Keep ProjectGit in sync with packages in the organization of a devel project

@@ -111,8 +85,6 @@ Keep ProjectGit in sync with packages in the organization of a devel project

%package workflow-pr
Summary: Keeps ProjectGit PR in-sync with a PackageGit PR
Requires: openssh-clients
Requires: git-core

%description workflow-pr
Keeps ProjectGit PR in-sync with a PackageGit PR

@@ -123,24 +95,15 @@ Keeps ProjectGit PR in-sync with a PackageGit PR
cp -r /home/abuild/rpmbuild/SOURCES/* ./

%build
go build \
-C devel-importer \
-buildmode=pie
go build \
-C utils/hujson \
-buildmode=pie
go build \
-C gitea-events-rabbitmq-publisher \
-buildmode=pie
go build \
-C gitea_status_proxy \
-buildmode=pie
go build \
-C group-review \
-buildmode=pie
go build \
-C obs-forward-bot \
-buildmode=pie
go build \
-C obs-staging-bot \
-buildmode=pie

@@ -154,29 +117,15 @@ go build \
-C workflow-pr \
-buildmode=pie

%check
go test -C common -v
go test -C group-review -v
go test -C obs-staging-bot -v
go test -C obs-status-service -v
go test -C workflow-direct -v
# TODO build fails
#go test -C workflow-pr -v

%install
install -D -m0755 devel-importer/devel-importer %{buildroot}%{_bindir}/devel-importer
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
install -D -m0755 gitea_status_proxy/gitea_status_proxy %{buildroot}%{_bindir}/gitea_status_proxy
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
install -D -m0644 systemd/group-review@.service %{buildroot}%{_unitdir}/group-review@.service
install -D -m0755 obs-forward-bot/obs-forward-bot %{buildroot}%{_bindir}/obs-forward-bot
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson

@@ -192,18 +141,6 @@ install -D -m0755 utils/hujson/hujson
%postun gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service

%pre group-review
%service_add_pre group-review@.service

%post group-review
%service_add_post group-review@.service

%preun group-review
%service_del_preun group-review@.service

%postun group-review
%service_del_postun group-review@.service

%pre obs-staging-bot
%service_add_pre obs-staging-bot.service

@@ -228,47 +165,25 @@ install -D -m0755 utils/hujson/hujson
%postun obs-status-service
%service_del_postun obs-status-service.service

%pre workflow-pr
%service_add_pre workflow-direct@.service

%post workflow-pr
%service_add_post workflow-direct@.service

%preun workflow-pr
%service_del_preun workflow-direct@.service

%postun workflow-pr
%service_del_postun workflow-direct@.service

%files devel-importer
%license COPYING
%doc devel-importer/README.md
%{_bindir}/devel-importer

%files doc
%license COPYING
%doc doc/README.md
%doc doc/workflows.md

%files gitea-events-rabbitmq-publisher
%license COPYING
%doc gitea-events-rabbitmq-publisher/README.md
%{_bindir}/gitea-events-rabbitmq-publisher
%{_unitdir}/gitea-events-rabbitmq-publisher.service

%files gitea-status-proxy
%files doc
%license COPYING
%{_bindir}/gitea_status_proxy
%doc doc/README.md
%doc doc/workflows.md

%files group-review
%license COPYING
%doc group-review/README.md
%{_bindir}/group-review
%{_unitdir}/group-review@.service

%files obs-forward-bot
%files utils
%license COPYING
%{_bindir}/obs-forward-bot
%{_bindir}/hujson

%files obs-staging-bot
%license COPYING

@@ -282,15 +197,10 @@ install -D -m0755 utils/hujson/hujson
%{_bindir}/obs-status-service
%{_unitdir}/obs-status-service.service

%files utils
%license COPYING
%{_bindir}/hujson

%files workflow-direct
%license COPYING
%doc workflow-direct/README.md
%{_bindir}/workflow-direct
%{_unitdir}/workflow-direct@.service

%files workflow-pr
%license COPYING
@@ -61,20 +61,6 @@ type Permissions struct {
Members []string
}

const (
Label_StagingAuto = "staging/Auto"
Label_ReviewPending = "review/Pending"
Label_ReviewDone = "review/Done"
)

func LabelKey(tag_value string) string {
// capitalize first letter and remove /
if len(tag_value) == 0 {
return ""
}
return strings.ToUpper(tag_value[0:1]) + strings.ReplaceAll(tag_value[1:], "/", "")
}

type AutogitConfig struct {
Workflows []string // [pr, direct, test]
Organization string

@@ -86,8 +72,6 @@ type AutogitConfig struct {
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories

Labels map[string]string // list of tags, if not default, to apply

NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume somethign else creates the ProjectGit PR
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers

@@ -204,8 +188,6 @@ func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *Autogit
if c.GitProjectName == prjgit {
return c
}
}
for _, c := range configs {
if c.Organization == org && c.Branch == branch {
return c
}

@@ -291,14 +273,6 @@ func (config *AutogitConfig) GetRemoteBranch() string {
return "origin_" + config.Branch
}

func (config *AutogitConfig) Label(label string) string {
if t, found := config.Labels[LabelKey(label)]; found {
return t
}

return label
}

type StagingConfig struct {
ObsProject string
RebuildAll bool
@@ -10,67 +10,6 @@ import (
mock_common "src.opensuse.org/autogits/common/mock"
)

func TestLabelKey(t *testing.T) {
tests := map[string]string{
"": "",
"foo": "Foo",
"foo/bar": "Foobar",
"foo/Bar": "FooBar",
}

for k, v := range tests {
if c := common.LabelKey(k); c != v {
t.Error("expected", v, "got", c, "input", k)
}
}
}

func TestConfigLabelParser(t *testing.T) {
tests := []struct {
name string
json string
label_value string
}{
{
name: "empty",
json: "{}",
label_value: "path/String",
},
{
name: "defined",
json: `{"Labels": {"foo": "bar", "PathString": "moo/Label"}}`,
label_value: "moo/Label",
},
{
name: "undefined",
json: `{"Labels": {"foo": "bar", "NotPathString": "moo/Label"}}`,
label_value: "path/String",
},
}

for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
repo := models.Repository{
DefaultBranch: "master",
}

ctl := gomock.NewController(t)
gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.json), "abc", nil)
gitea.EXPECT().GetRepository("foo", "bar").Return(&repo, nil)

config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
if err != nil || config == nil {
t.Fatal(err)
}

if l := config.Label("path/String"); l != test.label_value {
t.Error("Expecting", test.label_value, "got", l)
}
})
}
}

func TestProjectConfigMatcher(t *testing.T) {
configs := common.AutogitConfigs{
{

@@ -82,15 +21,6 @@ func TestProjectConfigMatcher(t *testing.T) {
Branch: "main",
GitProjectName: "test/prjgit#main",
},
{
Organization: "test",
Branch: "main",
GitProjectName: "test/bar#never_match",
},
{
Organization: "test",
GitProjectName: "test/bar#main",
},
}

tests := []struct {

@@ -120,20 +50,6 @@ func TestProjectConfigMatcher(t *testing.T) {
branch: "main",
config: 1,
},
{
name: "prjgit only match",
org: "test",
repo: "bar",
branch: "main",
config: 3,
},
{
name: "non-default branch match",
org: "test",
repo: "bar",
branch: "something_main",
config: -1,
},
}

for _, test := range tests {

@@ -189,10 +105,6 @@ func TestConfigWorkflowParser(t *testing.T) {
if config.ManualMergeOnly != false {
t.Fatal("This should be false")
}

if config.Label("foobar") != "foobar" {
t.Fatal("undefined label should return default value")
}
})
}
}
@@ -277,7 +277,7 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
args = slices.Insert(args, 1, "--unshallow")
}
e.GitExecOrPanic(repo, args...)
return remoteName, e.GitExec(repo, "checkout", "-f", "--track", "-B", branch, remoteRef)
return remoteName, e.GitExec(repo, "checkout", "--track", "-B", branch, remoteRef)
}

func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {

@@ -350,10 +350,6 @@ var ExtraGitParams []string

func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string, error) {
cmd := exec.Command("/usr/bin/git", params...)
var identityFile string
if i := os.Getenv("AUTOGITS_IDENTITY_FILE"); len(i) > 0 {
identityFile = " -i " + i
}
cmd.Env = []string{
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
"GIT_CONFIG_GLOBAL=/dev/null",

@@ -362,7 +358,7 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
"EMAIL=not@exist@src.opensuse.org",
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_LFS_SKIP_PUSH=1",
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes" + identityFile,
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
}
if len(ExtraGitParams) > 0 {
cmd.Env = append(cmd.Env, ExtraGitParams...)

@@ -392,7 +392,6 @@ func TestCommitTreeParsing(t *testing.T) {
commitId = commitId + strings.TrimSpace(string(data))
return len(data), nil
})
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
t.Fatal(err.Error())
}
@@ -29,7 +29,6 @@ import (
"path"
"path/filepath"
"slices"
"sync"
"time"

transport "github.com/go-openapi/runtime/client"

@@ -67,14 +66,6 @@ const (
ReviewStateUnknown models.ReviewStateType = ""
)

type GiteaLabelGetter interface {
GetLabels(org, repo string, idx int64) ([]*models.Label, error)
}

type GiteaLabelSettter interface {
SetLabels(org, repo string, idx int64, labels []string) ([]*models.Label, error)
}

type GiteaTimelineFetcher interface {
GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error)
}

@@ -100,10 +91,9 @@ type GiteaPRUpdater interface {
UpdatePullRequest(org, repo string, num int64, options *models.EditPullRequestOption) (*models.PullRequest, error)
}

type GiteaPRTimelineReviewFetcher interface {
type GiteaPRTimelineFetcher interface {
GiteaPRFetcher
GiteaTimelineFetcher
GiteaReviewFetcher
}

type GiteaCommitFetcher interface {

@@ -129,16 +119,10 @@ type GiteaPRChecker interface {
GiteaMaintainershipReader
}

type GiteaReviewFetcherAndRequesterAndUnrequester interface {
type GiteaReviewFetcherAndRequester interface {
GiteaReviewTimelineFetcher
GiteaCommentFetcher
GiteaReviewRequester
GiteaReviewUnrequester
}

type GiteaUnreviewTimelineFetcher interface {
GiteaTimelineFetcher
GiteaReviewUnrequester
}

type GiteaReviewRequester interface {

@@ -198,8 +182,6 @@ type Gitea interface {
GiteaCommitStatusGetter
GiteaCommitStatusSetter
GiteaSetRepoOptions
GiteaLabelGetter
GiteaLabelSettter

GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error)
GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error)

@@ -207,7 +189,7 @@ type Gitea interface {
GetOrganization(orgName string) (*models.Organization, error)
GetOrganizationRepositories(orgName string) ([]*models.Repository, error)
CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error)
CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error, bool)
CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error)
GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error)
GetRecentPullRequests(org, repo, branch string) ([]*models.PullRequest, error)
GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error)

@@ -484,30 +466,6 @@ func (gitea *GiteaTransport) SetRepoOptions(owner, repo string, manual_merge boo
return ok.Payload, err
}

func (gitea *GiteaTransport) GetLabels(owner, repo string, idx int64) ([]*models.Label, error) {
ret, err := gitea.client.Issue.IssueGetLabels(issue.NewIssueGetLabelsParams().WithOwner(owner).WithRepo(repo).WithIndex(idx), gitea.transport.DefaultAuthentication)
if err != nil {
return nil, err
}
return ret.Payload, err
}

func (gitea *GiteaTransport) SetLabels(owner, repo string, idx int64, labels []string) ([]*models.Label, error) {
interfaceLabels := make([]interface{}, len(labels))
for i, l := range labels {
interfaceLabels[i] = l
}

ret, err := gitea.client.Issue.IssueAddLabel(issue.NewIssueAddLabelParams().WithOwner(owner).WithRepo(repo).WithIndex(idx).WithBody(&models.IssueLabelsOption{Labels: interfaceLabels}),
gitea.transport.DefaultAuthentication)

if err != nil {
return nil, err
}

return ret.Payload, nil
}

const (
GiteaNotificationType_Pull = "Pull"
)

@@ -685,7 +643,7 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName s
return repo.Payload, nil
}

func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error, bool) {
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
prOptions := models.CreatePullRequestOption{
Base: targetId,
Head: srcId,

@@ -701,7 +659,7 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil && pr.Payload.State == "open" {
return pr.Payload, nil, false
return pr.Payload, nil
}

pr, err := gitea.client.Repository.RepoCreatePullRequest(

@@ -715,10 +673,10 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
)

if err != nil {
return nil, fmt.Errorf("Cannot create pull request. %w", err), true
return nil, fmt.Errorf("Cannot create pull request. %w", err)
}

return pr.GetPayload(), nil, true
return pr.GetPayload(), nil
}

func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewers ...string) ([]*models.PullReview, error) {

@@ -805,79 +763,45 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
return nil
}

type TimelineCacheData struct {
data []*models.TimelineComment
lastCheck time.Time
}

var giteaTimelineCache map[string]TimelineCacheData = make(map[string]TimelineCacheData)
var giteaTimelineCacheMutex sync.RWMutex

// returns timeline in reverse chronological create order
func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
page := int64(1)
resCount := 1

prID := fmt.Sprintf("%s/%s!%d", org, repo, idx)
giteaTimelineCacheMutex.RLock()
TimelineCache, IsCached := giteaTimelineCache[prID]
var LastCachedTime strfmt.DateTime
if IsCached {
l := len(TimelineCache.data)
if l > 0 {
LastCachedTime = TimelineCache.data[0].Updated
}

// cache data for 5 seconds
if TimelineCache.lastCheck.Add(time.Second*5).Compare(time.Now()) > 0 {
giteaTimelineCacheMutex.RUnlock()
return TimelineCache.data, nil
}
}
giteaTimelineCacheMutex.RUnlock()

giteaTimelineCacheMutex.Lock()
defer giteaTimelineCacheMutex.Unlock()
retData := []*models.TimelineComment{}

for resCount > 0 {
opts := issue.NewIssueGetCommentsAndTimelineParams().WithOwner(org).WithRepo(repo).WithIndex(idx).WithPage(&page)
if !LastCachedTime.IsZero() {
opts = opts.WithSince(&LastCachedTime)
}
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(opts, gitea.transport.DefaultAuthentication)
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
issue.NewIssueGetCommentsAndTimelineParams().
WithOwner(org).
WithRepo(repo).
WithIndex(idx).
WithPage(&page),
gitea.transport.DefaultAuthentication,
)

if err != nil {
return nil, err
}

if resCount = len(res.Payload); resCount == 0 {
break
}

for _, d := range res.Payload {
if d != nil {
if time.Time(d.Created).Compare(time.Time(LastCachedTime)) > 0 {
// created after last check, so we append here
TimelineCache.data = append(TimelineCache.data, d)
} else {
// we need something updated in the timeline, maybe
}
}
}

if resCount < 10 {
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
if resCount == 0 {
break
}
page++

for _, d := range res.Payload {
if d != nil {
retData = append(retData, d)
}
}
}
LogDebug("timeline", prID, "# timeline:", len(TimelineCache.data))
slices.SortFunc(TimelineCache.data, func(a, b *models.TimelineComment) int {
LogDebug("total results:", len(retData))
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))
})

TimelineCache.lastCheck = time.Now()
giteaTimelineCache[prID] = TimelineCache

return TimelineCache.data, nil
return retData, nil
}

func (gitea *GiteaTransport) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
@@ -1,120 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: config.go
//
// Generated by this command:
//
// mockgen -source=config.go -destination=mock/config.go -typed
//

// Package mock_common is a generated GoMock package.
package mock_common

import (
reflect "reflect"

gomock "go.uber.org/mock/gomock"
models "src.opensuse.org/autogits/common/gitea-generated/models"
)

// MockGiteaFileContentAndRepoFetcher is a mock of GiteaFileContentAndRepoFetcher interface.
type MockGiteaFileContentAndRepoFetcher struct {
ctrl *gomock.Controller
recorder *MockGiteaFileContentAndRepoFetcherMockRecorder
isgomock struct{}
}

// MockGiteaFileContentAndRepoFetcherMockRecorder is the mock recorder for MockGiteaFileContentAndRepoFetcher.
type MockGiteaFileContentAndRepoFetcherMockRecorder struct {
mock *MockGiteaFileContentAndRepoFetcher
}

// NewMockGiteaFileContentAndRepoFetcher creates a new mock instance.
func NewMockGiteaFileContentAndRepoFetcher(ctrl *gomock.Controller) *MockGiteaFileContentAndRepoFetcher {
mock := &MockGiteaFileContentAndRepoFetcher{ctrl: ctrl}
mock.recorder = &MockGiteaFileContentAndRepoFetcherMockRecorder{mock}
return mock
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockGiteaFileContentAndRepoFetcher) EXPECT() *MockGiteaFileContentAndRepoFetcherMockRecorder {
return m.recorder
}

// GetRepository mocks base method.
func (m *MockGiteaFileContentAndRepoFetcher) GetRepository(org, repo string) (*models.Repository, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetRepository", org, repo)
ret0, _ := ret[0].(*models.Repository)
ret1, _ := ret[1].(error)
return ret0, ret1
}

// GetRepository indicates an expected call of GetRepository.
func (mr *MockGiteaFileContentAndRepoFetcherMockRecorder) GetRepository(org, repo any) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRepository", reflect.TypeOf((*MockGiteaFileContentAndRepoFetcher)(nil).GetRepository), org, repo)
return &MockGiteaFileContentAndRepoFetcherGetRepositoryCall{Call: call}
}

// MockGiteaFileContentAndRepoFetcherGetRepositoryCall wrap *gomock.Call
type MockGiteaFileContentAndRepoFetcherGetRepositoryCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) Return(arg0 *models.Repository, arg1 error) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.Return(arg0, arg1)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) Do(f func(string, string) (*models.Repository, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) DoAndReturn(f func(string, string) (*models.Repository, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.DoAndReturn(f)
return c
}

// GetRepositoryFileContent mocks base method.
func (m *MockGiteaFileContentAndRepoFetcher) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetRepositoryFileContent", org, repo, hash, path)
ret0, _ := ret[0].([]byte)
ret1, _ := ret[1].(string)
ret2, _ := ret[2].(error)
return ret0, ret1, ret2
}

// GetRepositoryFileContent indicates an expected call of GetRepositoryFileContent.
func (mr *MockGiteaFileContentAndRepoFetcherMockRecorder) GetRepositoryFileContent(org, repo, hash, path any) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRepositoryFileContent", reflect.TypeOf((*MockGiteaFileContentAndRepoFetcher)(nil).GetRepositoryFileContent), org, repo, hash, path)
return &MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall{Call: call}
}

// MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall wrap *gomock.Call
type MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) Return(arg0 []byte, arg1 string, arg2 error) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.Return(arg0, arg1, arg2)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) Do(f func(string, string, string, string) ([]byte, string, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) DoAndReturn(f func(string, string, string, string) ([]byte, string, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
File diff suppressed because it is too large
@@ -1,156 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: maintainership.go
//
// Generated by this command:
//
// mockgen -source=maintainership.go -destination=mock/maintainership.go -typed
//

// Package mock_common is a generated GoMock package.
package mock_common

import (
reflect "reflect"

gomock "go.uber.org/mock/gomock"
common "src.opensuse.org/autogits/common"
models "src.opensuse.org/autogits/common/gitea-generated/models"
)

// MockMaintainershipData is a mock of MaintainershipData interface.
type MockMaintainershipData struct {
ctrl *gomock.Controller
recorder *MockMaintainershipDataMockRecorder
isgomock struct{}
}

// MockMaintainershipDataMockRecorder is the mock recorder for MockMaintainershipData.
type MockMaintainershipDataMockRecorder struct {
mock *MockMaintainershipData
}

// NewMockMaintainershipData creates a new mock instance.
func NewMockMaintainershipData(ctrl *gomock.Controller) *MockMaintainershipData {
mock := &MockMaintainershipData{ctrl: ctrl}
mock.recorder = &MockMaintainershipDataMockRecorder{mock}
return mock
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockMaintainershipData) EXPECT() *MockMaintainershipDataMockRecorder {
return m.recorder
}

// IsApproved mocks base method.
func (m *MockMaintainershipData) IsApproved(Pkg string, Reviews []*models.PullReview, Submitter string, ReviewGroups []*common.ReviewGroup) bool {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "IsApproved", Pkg, Reviews, Submitter, ReviewGroups)
ret0, _ := ret[0].(bool)
return ret0
}

// IsApproved indicates an expected call of IsApproved.
func (mr *MockMaintainershipDataMockRecorder) IsApproved(Pkg, Reviews, Submitter, ReviewGroups any) *MockMaintainershipDataIsApprovedCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsApproved", reflect.TypeOf((*MockMaintainershipData)(nil).IsApproved), Pkg, Reviews, Submitter, ReviewGroups)
return &MockMaintainershipDataIsApprovedCall{Call: call}
}

// MockMaintainershipDataIsApprovedCall wrap *gomock.Call
type MockMaintainershipDataIsApprovedCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataIsApprovedCall) Return(arg0 bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.Return(arg0)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataIsApprovedCall) Do(f func(string, []*models.PullReview, string, []*common.ReviewGroup) bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataIsApprovedCall) DoAndReturn(f func(string, []*models.PullReview, string, []*common.ReviewGroup) bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.DoAndReturn(f)
return c
}

// ListPackageMaintainers mocks base method.
func (m *MockMaintainershipData) ListPackageMaintainers(Pkg string, OptionalGroupExpasion []*common.ReviewGroup) []string {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListPackageMaintainers", Pkg, OptionalGroupExpasion)
ret0, _ := ret[0].([]string)
return ret0
}

// ListPackageMaintainers indicates an expected call of ListPackageMaintainers.
func (mr *MockMaintainershipDataMockRecorder) ListPackageMaintainers(Pkg, OptionalGroupExpasion any) *MockMaintainershipDataListPackageMaintainersCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListPackageMaintainers", reflect.TypeOf((*MockMaintainershipData)(nil).ListPackageMaintainers), Pkg, OptionalGroupExpasion)
return &MockMaintainershipDataListPackageMaintainersCall{Call: call}
}

// MockMaintainershipDataListPackageMaintainersCall wrap *gomock.Call
type MockMaintainershipDataListPackageMaintainersCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataListPackageMaintainersCall) Return(arg0 []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.Return(arg0)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataListPackageMaintainersCall) Do(f func(string, []*common.ReviewGroup) []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataListPackageMaintainersCall) DoAndReturn(f func(string, []*common.ReviewGroup) []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.DoAndReturn(f)
return c
}

// ListProjectMaintainers mocks base method.
func (m *MockMaintainershipData) ListProjectMaintainers(OptionalGroupExpansion []*common.ReviewGroup) []string {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListProjectMaintainers", OptionalGroupExpansion)
ret0, _ := ret[0].([]string)
return ret0
}

// ListProjectMaintainers indicates an expected call of ListProjectMaintainers.
func (mr *MockMaintainershipDataMockRecorder) ListProjectMaintainers(OptionalGroupExpansion any) *MockMaintainershipDataListProjectMaintainersCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListProjectMaintainers", reflect.TypeOf((*MockMaintainershipData)(nil).ListProjectMaintainers), OptionalGroupExpansion)
return &MockMaintainershipDataListProjectMaintainersCall{Call: call}
}

// MockMaintainershipDataListProjectMaintainersCall wrap *gomock.Call
type MockMaintainershipDataListProjectMaintainersCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataListProjectMaintainersCall) Return(arg0 []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.Return(arg0)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataListProjectMaintainersCall) Do(f func([]*common.ReviewGroup) []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataListProjectMaintainersCall) DoAndReturn(f func([]*common.ReviewGroup) []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
@@ -1,85 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: obs_utils.go
//
// Generated by this command:
//
// mockgen -source=obs_utils.go -destination=mock/obs_utils.go -typed
//

// Package mock_common is a generated GoMock package.
package mock_common

import (
reflect "reflect"

gomock "go.uber.org/mock/gomock"
common "src.opensuse.org/autogits/common"
)

// MockObsStatusFetcherWithState is a mock of ObsStatusFetcherWithState interface.
type MockObsStatusFetcherWithState struct {
ctrl *gomock.Controller
recorder *MockObsStatusFetcherWithStateMockRecorder
isgomock struct{}
}

// MockObsStatusFetcherWithStateMockRecorder is the mock recorder for MockObsStatusFetcherWithState.
type MockObsStatusFetcherWithStateMockRecorder struct {
mock *MockObsStatusFetcherWithState
}

// NewMockObsStatusFetcherWithState creates a new mock instance.
func NewMockObsStatusFetcherWithState(ctrl *gomock.Controller) *MockObsStatusFetcherWithState {
mock := &MockObsStatusFetcherWithState{ctrl: ctrl}
mock.recorder = &MockObsStatusFetcherWithStateMockRecorder{mock}
return mock
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockObsStatusFetcherWithState) EXPECT() *MockObsStatusFetcherWithStateMockRecorder {
return m.recorder
}

// BuildStatusWithState mocks base method.
func (m *MockObsStatusFetcherWithState) BuildStatusWithState(project string, opts *common.BuildResultOptions, packages ...string) (*common.BuildResultList, error) {
m.ctrl.T.Helper()
varargs := []any{project, opts}
for _, a := range packages {
varargs = append(varargs, a)
}
ret := m.ctrl.Call(m, "BuildStatusWithState", varargs...)
ret0, _ := ret[0].(*common.BuildResultList)
ret1, _ := ret[1].(error)
return ret0, ret1
}

// BuildStatusWithState indicates an expected call of BuildStatusWithState.
func (mr *MockObsStatusFetcherWithStateMockRecorder) BuildStatusWithState(project, opts any, packages ...any) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
mr.mock.ctrl.T.Helper()
varargs := append([]any{project, opts}, packages...)
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildStatusWithState", reflect.TypeOf((*MockObsStatusFetcherWithState)(nil).BuildStatusWithState), varargs...)
return &MockObsStatusFetcherWithStateBuildStatusWithStateCall{Call: call}
}

// MockObsStatusFetcherWithStateBuildStatusWithStateCall wrap *gomock.Call
type MockObsStatusFetcherWithStateBuildStatusWithStateCall struct {
*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) Return(arg0 *common.BuildResultList, arg1 error) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.Return(arg0, arg1)
return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) Do(f func(string, *common.BuildResultOptions, ...string) (*common.BuildResultList, error)) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.Do(f)
return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) DoAndReturn(f func(string, *common.BuildResultOptions, ...string) (*common.BuildResultList, error)) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
@@ -600,16 +600,15 @@ func PackageBuildStatusComp(A, B *PackageBuildStatus) int {
}

type BuildResult struct {
XMLName xml.Name `xml:"result" json:"xml,omitempty"`
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr,omitempty"`
ScmSync string `xml:"scmsync,omitempty"`
ScmInfo string `xml:"scminfo,omitempty"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []*PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist,omitempty"`
Binaries []BinaryList `xml:"binarylist"`

LastUpdate time.Time
}

@@ -636,8 +635,8 @@ type BinaryList struct {
}

type BuildResultList struct {
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
Result []*BuildResult `xml:"result"`

isLastBuild bool
265
common/pr.go
265
common/pr.go
@@ -23,8 +23,7 @@ type PRSet struct {
|
||||
PRs []*PRInfo
|
||||
Config *AutogitConfig
|
||||
|
||||
BotUser string
|
||||
HasAutoStaging bool
|
||||
BotUser string
|
||||
}
|
||||
|
||||
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
|
||||
@@ -34,41 +33,6 @@ func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
|
||||
return
|
||||
}
|
||||
|
||||
func (prinfo *PRInfo) RemoveReviewers(gitea GiteaUnreviewTimelineFetcher, Reviewers []string, BotUser string) {
|
||||
org, repo, idx := prinfo.PRComponents()
|
||||
tl, err := gitea.GetTimeline(org, repo, idx)
|
||||
if err != nil {
|
||||
LogError("Failed to fetch timeline for", PRtoString(prinfo.PR), err)
|
||||
}
|
||||
|
||||
// find review request for each reviewer
|
||||
ReviewersToUnrequest := Reviewers
|
||||
ReviewersAlreadyChecked := []string{}
|
||||
|
||||
for _, tlc := range tl {
|
||||
if tlc.Type == TimelineCommentType_ReviewRequested && tlc.Assignee != nil {
|
||||
user := tlc.Assignee.UserName
|
||||
|
||||
if idx := slices.Index(ReviewersToUnrequest, user); idx >= 0 && !slices.Contains(ReviewersAlreadyChecked, user) {
|
||||
if tlc.User != nil && tlc.User.UserName == BotUser {
|
||||
ReviewersAlreadyChecked = append(ReviewersAlreadyChecked, user)
|
||||
continue
|
||||
}
|
||||
ReviewersToUnrequest = slices.Delete(ReviewersToUnrequest, idx, idx+1)
|
||||
if len(Reviewers) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LogDebug("Unrequesting reviewes for", PRtoString(prinfo.PR), ReviewersToUnrequest)
|
||||
err = gitea.UnrequestReview(org, repo, idx, ReviewersToUnrequest...)
|
||||
if err != nil {
|
||||
LogError("Failed to unrequest reviewers for", PRtoString(prinfo.PR), err)
|
||||
}
|
||||
}
|
||||
|
||||
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
|
||||
for _, p := range currentSet {
|
||||
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
|
||||
@@ -99,7 +63,7 @@ func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRIn
|
||||
|
||||
var Timeline_RefIssueNotFound error = errors.New("RefIssue not found on the timeline")
|
||||
|
||||
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
|
||||
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
|
||||
timeline, err := gitea.GetTimeline(org, repo, num)
|
||||
if err != nil {
|
||||
LogError("Failed to fetch timeline for", org, repo, "#", num, err)
|
||||
@@ -124,19 +88,14 @@ func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher,
|
||||
}
|
||||
|
||||
pr, err := gitea.GetPullRequest(prjGitOrg, prjGitRepo, issue.Index)
|
||||
if err != nil {
|
||||
switch err.(type) {
|
||||
case *repository.RepoGetPullRequestNotFound: // deleted?
|
||||
continue
|
||||
default:
|
||||
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
|
||||
continue
|
||||
}
|
||||
switch err.(type) {
|
||||
case *repository.RepoGetPullRequestNotFound: // deleted?
|
||||
continue
|
||||
default:
|
||||
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
|
||||
}
|
||||
|
||||
LogDebug("found ref PR on timeline:", PRtoString(pr))
|
||||
if pr.Base.Name != prjGitBranch {
|
||||
LogDebug(" -> not matching:", pr.Base.Name, prjGitBranch)
|
||||
if pr.Base.Ref != prjGitBranch {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -156,7 +115,7 @@ func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher,
|
||||
return nil, Timeline_RefIssueNotFound
|
||||
}
|
||||
|
||||
func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
|
||||
func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
|
||||
var pr *models.PullRequest
|
||||
var err error
|
||||
|
||||
@@ -182,15 +141,6 @@ func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo strin
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, pr := range prs {
|
||||
org, repo, idx := pr.PRComponents()
|
||||
reviews, err := FetchGiteaReviews(gitea, org, repo, idx)
|
||||
if err != nil {
|
||||
LogError("Error fetching reviews for", PRtoString(pr.PR), ":", err)
|
||||
}
|
||||
pr.Reviews = reviews
|
||||
}
|
||||
|
||||
return &PRSet{
|
||||
PRs: prs,
|
||||
Config: config,
|
||||
@@ -198,12 +148,6 @@ func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo strin
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (prset *PRSet) RemoveReviewers(gitea GiteaUnreviewTimelineFetcher, reviewers []string) {
|
||||
for _, prinfo := range prset.PRs {
|
||||
prinfo.RemoveReviewers(gitea, reviewers, prset.BotUser)
|
||||
}
|
||||
}
|
||||
|
||||
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
|
||||
for _, p := range rs.PRs {
|
||||
if p.PR.Base.RepoID == pr.Base.RepoID &&
|
||||
@@ -289,144 +233,67 @@ next_rs:
|
||||
}
|
||||
|
||||
for _, pr := range prjpr_set {
|
||||
if strings.EqualFold(prinfo.PR.Base.Repo.Owner.UserName, pr.Org) && strings.EqualFold(prinfo.PR.Base.Repo.Name, pr.Repo) && prinfo.PR.Index == pr.Num {
|
||||
if prinfo.PR.Base.Repo.Owner.UserName == pr.Org && prinfo.PR.Base.Repo.Name == pr.Repo && prinfo.PR.Index == pr.Num {
|
||||
continue next_rs
|
||||
}
|
||||
}
|
||||
LogDebug(" PR: ", PRtoString(prinfo.PR), "not found in project git PRSet")
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (rs *PRSet) FindMissingAndExtraReviewers(maintainers MaintainershipData, idx int) (missing, extra []string) {
|
||||
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintainers MaintainershipData) error {
|
||||
configReviewers := ParseReviewers(rs.Config.Reviewers)
|
||||
|
||||
// remove reviewers that were already requested and are not stale
|
||||
prjMaintainers := maintainers.ListProjectMaintainers(nil)
|
||||
LogDebug("project maintainers:", prjMaintainers)
|
||||
for _, pr := range rs.PRs {
|
||||
reviewers := []string{}
|
||||
|
||||
pr := rs.PRs[idx]
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
missing = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
|
||||
if rs.HasAutoStaging {
|
||||
missing = append(missing, Bot_BuildReview)
|
||||
}
|
||||
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
|
||||
// only need project maintainer reviews if:
|
||||
// * not created by a bot and has other PRs, or
|
||||
// * not created by maintainer
|
||||
noReviewPRCreators := prjMaintainers
|
||||
if len(rs.PRs) > 1 {
|
||||
noReviewPRCreators = append(noReviewPRCreators, rs.BotUser)
|
||||
}
|
||||
if slices.Contains(noReviewPRCreators, pr.PR.User.UserName) || pr.Reviews.IsReviewedByOneOf(prjMaintainers...) {
|
||||
LogDebug("Project already reviewed by a project maintainer, remove rest")
|
||||
// do not remove reviewers if they are also maintainers
|
||||
prjMaintainers = slices.DeleteFunc(prjMaintainers, func(m string) bool { return slices.Contains(missing, m) })
|
||||
extra = slices.Concat(prjMaintainers, []string{rs.BotUser})
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
|
||||
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
|
||||
if len(rs.PRs) == 1 {
|
||||
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers(nil))
|
||||
}
|
||||
} else {
|
||||
// if bot not created PrjGit or prj maintainer, we need to add project reviewers here
|
||||
if slices.Contains(noReviewPRCreators, pr.PR.User.UserName) {
|
||||
LogDebug("No need for project maintainers")
|
||||
extra = slices.Concat(prjMaintainers, []string{rs.BotUser})
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(nil), maintainers.ListPackageMaintainers(pkg, nil), configReviewers.PkgOptional)
|
||||
}
|
||||
|
||||
slices.Sort(reviewers)
|
||||
reviewers = slices.Compact(reviewers)
|
||||
|
||||
// submitters do not need to review their own work
|
||||
if idx := slices.Index(reviewers, pr.PR.User.UserName); idx != -1 {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
}
|
||||
|
||||
LogDebug("PR: ", pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
LogDebug("reviewers for PR:", reviewers)
|
||||
|
||||
// remove reviewers that were already requested and are not stale
|
||||
reviews, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Error fetching reviews:", err)
|
||||
return err
|
||||
}
|
||||
|
||||
for idx := 0; idx < len(reviewers); {
|
||||
user := reviewers[idx]
|
||||
if reviews.HasPendingReviewBy(user) || reviews.IsReviewedBy(user) {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
LogDebug("removing reviewer:", user)
|
||||
} else {
|
||||
LogDebug("Adding prjMaintainers to PrjGit")
|
||||
missing = append(missing, prjMaintainers...)
|
||||
idx++
|
||||
}
|
||||
}
|
||||
} else {
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
pkgMaintainers := maintainers.ListPackageMaintainers(pkg, nil)
|
||||
Maintainers := slices.Concat(prjMaintainers, pkgMaintainers)
|
||||
noReviewPkgPRCreators := pkgMaintainers
|
||||
|
||||
LogDebug("packakge maintainers:", Maintainers)
|
||||
|
||||
missing = slices.Concat(configReviewers.Pkg, configReviewers.PkgOptional)
|
||||
if slices.Contains(noReviewPkgPRCreators, pr.PR.User.UserName) || pr.Reviews.IsReviewedByOneOf(Maintainers...) {
|
||||
// submitter is maintainer or already reviewed
|
||||
LogDebug("Package reviewed by maintainer (or subitter is maintainer), remove the rest of them")
|
||||
// do not remove reviewers if they are also maintainers
|
||||
Maintainers = slices.DeleteFunc(Maintainers, func(m string) bool { return slices.Contains(missing, m) })
|
||||
extra = slices.Concat(Maintainers, []string{rs.BotUser})
|
||||
} else {
|
||||
// maintainer review is missing
|
||||
LogDebug("Adding package maintainers to package git")
|
||||
missing = append(missing, pkgMaintainers...)
|
||||
}
|
||||
}
|
||||
|
||||
slices.Sort(missing)
|
||||
missing = slices.Compact(missing)
|
||||
|
||||
slices.Sort(extra)
|
||||
extra = slices.Compact(extra)
|
||||
|
||||
// submitters cannot review their own work
|
||||
if idx := slices.Index(missing, pr.PR.User.UserName); idx != -1 {
|
||||
missing = slices.Delete(missing, idx, idx+1)
|
||||
}
|
||||
|
||||
LogDebug("PR: ", PRtoString(pr.PR))
|
||||
LogDebug(" preliminary add reviewers for PR:", missing)
|
||||
LogDebug(" preliminary rm reviewers for PR:", extra)
|
||||
|
||||
// remove missing reviewers that are already done or already pending
|
||||
for idx := 0; idx < len(missing); {
|
||||
user := missing[idx]
|
||||
if pr.Reviews.HasPendingReviewBy(user) || pr.Reviews.IsReviewedBy(user) {
|
||||
missing = slices.Delete(missing, idx, idx+1)
|
||||
LogDebug(" removing done/pending reviewer:", user)
|
||||
} else {
|
||||
idx++
|
||||
}
|
||||
}
|
||||
|
||||
// remove extra reviews that are actually only pending, and only pending by us
|
||||
for idx := 0; idx < len(extra); {
|
||||
user := extra[idx]
|
||||
rr := pr.Reviews.FindReviewRequester(user)
|
||||
if rr != nil && rr.User.UserName == rs.BotUser && pr.Reviews.HasPendingReviewBy(user) {
|
||||
// good to remove this review
|
||||
idx++
|
||||
} else {
|
||||
// this review should not be considered as extra by us
|
||||
LogDebug(" - cannot find? to remove", user)
|
||||
if rr != nil {
|
||||
LogDebug(" ", rr.User.UserName, "vs.", rs.BotUser, pr.Reviews.HasPendingReviewBy(user))
|
||||
}
|
||||
extra = slices.Delete(extra, idx, idx+1)
|
||||
}
|
||||
}
|
||||
|
||||
LogDebug(" add reviewers for PR:", missing)
|
||||
LogDebug(" rm reviewers for PR:", extra)
|
||||
|
||||
return missing, extra
|
||||
}
|
||||
|
||||
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequesterAndUnrequester, maintainers MaintainershipData) error {
|
||||
for idx, pr := range rs.PRs {
|
||||
missingReviewers, extraReviewers := rs.FindMissingAndExtraReviewers(maintainers, idx)
|
||||
|
||||
if len(missingReviewers) > 0 {
|
||||
LogDebug(" Requesting reviews from:", missingReviewers)
|
||||
// get maintainers associated with the PR too
|
||||
if len(reviewers) > 0 {
|
||||
LogDebug("Requesting reviews from:", reviewers)
|
||||
if !IsDryRun {
|
||||
for _, r := range missingReviewers {
|
||||
for _, r := range reviewers {
|
||||
if _, err := gitea.RequestReviews(pr.PR, r); err != nil {
|
||||
LogError("Cannot create reviews on", PRtoString(pr.PR), "for user:", r, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(extraReviewers) > 0 {
|
||||
LogDebug(" UnRequesting reviews from:", extraReviewers)
|
||||
if !IsDryRun {
|
||||
for _, r := range extraReviewers {
|
||||
org, repo, idx := pr.PRComponents()
|
||||
if err := gitea.UnrequestReview(org, repo, idx, r); err != nil {
|
||||
LogError("Cannot unrequest reviews on", PRtoString(pr.PR), "for user:", r, err)
|
||||
LogError("Cannot create reviews on", fmt.Sprintf("%s/%s!%d for [%s]", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", ")), err)
|
||||
}
|
||||
}
|
||||
}
|
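These hunks fold the reviewer bookkeeping into a single AssignReviewers pass over the PR set. A minimal sketch of how a caller might drive it, assuming the FetchPRSet and AssignReviewers signatures exercised by TestPRAssignReviewers later in this diff; botUser, prFetcher, reviewClient and the org/repo/index values are placeholders, not names taken from the bot:

```go
// Sketch only: prFetcher satisfies the PR/timeline fetcher interface and
// reviewClient the review fetcher/requester interface used by the tests.
prs, err := FetchPRSet(botUser, prFetcher, "org", "pkg", int64(42), config)
if err != nil {
	return fmt.Errorf("cannot fetch PR set: %w", err)
}
// AssignReviewers fetches the current reviews per PR and requests the missing ones.
if err := prs.AssignReviewers(reviewClient, maintainers); err != nil {
	return fmt.Errorf("cannot assign reviewers: %w", err)
}
```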
||||
@@ -452,12 +319,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
if err == nil && prjgit != nil {
|
||||
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers(groups))
|
||||
LogDebug("Fetching reviews for", prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gita reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
prjgit.Reviews = r
|
||||
if prjgit.Reviews.IsManualMergeOK() {
|
||||
is_manually_reviewed_ok = true
|
||||
@@ -473,12 +339,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg, groups))
|
||||
LogDebug("Fetching reviews for", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gita reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
pr.Reviews = r
|
||||
if !pr.Reviews.IsManualMergeOK() {
|
||||
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
|
||||
@@ -500,9 +365,6 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
var pkg string
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = configReviewers.Prj
|
||||
if rs.HasAutoStaging {
|
||||
reviewers = append(reviewers, Bot_BuildReview)
|
||||
}
|
||||
pkg = ""
|
||||
} else {
|
||||
reviewers = configReviewers.Pkg
|
||||
@@ -514,12 +376,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
return false
|
||||
}
|
||||
|
||||
r, err := FetchGiteaReviews(gitea, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gitea reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
|
||||
is_manually_reviewed_ok = r.IsApproved()
|
||||
LogDebug("PR to", pr.PR.Base.Repo.Name, "reviewed?", is_manually_reviewed_ok)
|
||||
@@ -532,7 +393,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
|
||||
if need_maintainer_review := !rs.IsPrjGitPR(pr.PR) || pr.PR.User.UserName != rs.BotUser; need_maintainer_review {
|
||||
// Do not expand groups here, as the group-review-bot will ACK if group has reviewed.
|
||||
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.Reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
|
||||
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
|
||||
LogDebug(" not approved?", pkg)
|
||||
return false
|
||||
}
|
||||
@@ -580,7 +441,6 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged {
|
||||
panic("Can't handle conflicts yet")
|
||||
if s.Path != ".gitmodules" {
|
||||
return err
|
||||
}
|
||||
@@ -589,10 +449,10 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
|
||||
/*s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
}*/
|
||||
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
@@ -602,10 +462,11 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
|
||||
/*
|
||||
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
}*/
|
||||
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
@@ -615,11 +476,11 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
|
||||
// merge from subs3 (target), subs1 (orig), subs2 (2nd base that is missing from target base)
|
||||
// merge from subs3 (target), subs2 (2nd base that is missing from target base)
|
||||
// this will update submodules
|
||||
mergedSubs := slices.Concat(subs1, subs2, subs3)
|
||||
mergedSubs := slices.Concat(subs2, subs3)
|
||||
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs2), len(subs3))+5)
|
||||
nextSub:
|
||||
for subName := range submodules {
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
@@ -14,23 +15,22 @@ import (
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
/*
|
||||
func TestCockpit(t *testing.T) {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
|
||||
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
|
||||
if err != nil {
|
||||
t.Fatal("Fail to timeline", err)
|
||||
}
|
||||
t.Log(tl)
|
||||
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
|
||||
if err != nil {
|
||||
t.Fatal("Error:", err)
|
||||
}
|
||||
|
||||
t.Error(r)
|
||||
func TestCockpit(t *testing.T) {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
|
||||
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
|
||||
if err != nil {
|
||||
t.Fatal("Fail to timeline", err)
|
||||
}
|
||||
t.Log(tl)
|
||||
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
|
||||
if err != nil {
|
||||
t.Fatal("Error:", err)
|
||||
}
|
||||
|
||||
t.Error(r)
|
||||
}
|
||||
*/
|
||||
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
|
||||
timeline := make([]*models.TimelineComment, len(reviews))
|
||||
@@ -75,7 +75,7 @@ func TestPR(t *testing.T) {
|
||||
consistentSet bool
|
||||
prjGitPRIndex int
|
||||
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error)
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error)
|
||||
}{
|
||||
{
|
||||
name: "Error fetching PullRequest",
|
||||
@@ -147,7 +147,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
@@ -179,7 +179,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
@@ -207,7 +207,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -241,7 +241,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -275,7 +275,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -311,7 +311,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -346,7 +346,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -388,7 +388,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -430,7 +430,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -473,7 +473,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -500,7 +500,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
config := common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2", "~*bot"},
|
||||
Branch: "branch",
|
||||
@@ -515,7 +515,7 @@ func TestPR(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaPRChecker(ctl)
|
||||
// reviewer_mock := mock_common.NewMockGiteaReviewRequester(ctl)
|
||||
|
||||
@@ -619,514 +619,288 @@ func TestPR(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindMissingAndExtraReviewers(t *testing.T) {
|
||||
func TestPRAssignReviewers(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prset *common.PRSet
|
||||
maintainers common.MaintainershipData
|
||||
pkgReviews []*models.PullReview
|
||||
pkgTimeline []*models.TimelineComment
|
||||
prjReviews []*models.PullReview
|
||||
prjTimeline []*models.TimelineComment
|
||||
|
||||
noAutoStaging bool
|
||||
|
||||
expected_missing_reviewers [][]string
|
||||
expected_extra_reviewers [][]string
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "No reviewers",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only and no auto staging",
|
||||
noAutoStaging: true,
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
nil,
|
||||
{"user1"},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer and one pkg reviewer only",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{"user2"},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"user2", "prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter reviewer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "m1"}}},
|
||||
RequestedReviewers: []string{"m1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "m1"}, Type: common.TimelineCommentType_ReviewRequested},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"m1", "submitter"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
nil,
|
||||
{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
expected_extra_reviewers: [][]string{
|
||||
{"m1"},
|
||||
name: "No need to get reviews of submitter",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter maintainer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
name: "Reviews are done",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "Add reviewer if also maintainer where review by maintainer is not needed",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter", "*reviewer"},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter", "reviewer"}, "": []string{"reviewer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{"reviewer"},
|
||||
[]string{"autogits_obs_staging_bot", "reviewer", "user1"},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "Dont remove reviewer if also maintainer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "reviewer"}}},
|
||||
RequestedReviewers: []string{"reviewer"},
|
||||
FullTimeline: []*models.TimelineComment{{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "reviewer"}}},
|
||||
},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "reviewer"}}},
|
||||
RequestedReviewers: []string{"reviewer"},
|
||||
FullTimeline: []*models.TimelineComment{{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "reviewer"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter", "*reviewer"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter", "reviewer"}, "": []string{"reviewer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "Extra project reviewer on the package",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prjmaintainer"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "user2"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "user1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer"}, "": {"prjmaintainer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{},
|
||||
expected_extra_reviewers: [][]string{{"prjmaintainer"}},
|
||||
expectedReviewerCall: [2][]string{},
|
||||
},
|
||||
{
|
||||
name: "Extra project reviewers on the package and project",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prjmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "pkgm1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "pkgm2"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj2"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "someother"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer", "pkgm1", "pkgm2", "someother", "prj1", "prj2"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj2"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm2"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj2"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot", "prj1", "prj2"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj2"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
name: "Stale review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "org/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer", "pkgm1", "pkgm2"}, "": {"prjmaintainer", "prj1", "prj2"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{},
|
||||
expected_extra_reviewers: [][]string{{"pkgm1", "pkgm2", "prj1", "prj2", "prjmaintainer"}, {"prj1", "prj2"}},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No extra project reviewers on the package and project (all pending)",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prjmaintainer"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "pkgm1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "someother"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer", "pkgm1", "someother", "prj1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "!bot"}, Assignee: &models.User{UserName: "someother"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prj"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prj1"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot", "prj1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "!bot"}, Assignee: &models.User{UserName: "user1"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prj/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
name: "Stale optional review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2", "~bot"},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer", "pkgm1", "pkgm2"}, "": {"prjmaintainer", "prj1", "prj2"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{{"pkgm2", "prj2"}},
|
||||
expected_extra_reviewers: [][]string{{}, {"prj1"}},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "bot"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer", "bot"}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
test.prset.HasAutoStaging = !test.noAutoStaging
|
||||
for idx, pr := range test.prset.PRs {
|
||||
missing, extra := test.prset.FindMissingAndExtraReviewers(test.maintainers, idx)
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
// avoid nil dereference below, by adding empty array elements
|
||||
if idx >= len(test.expected_missing_reviewers) {
|
||||
test.expected_missing_reviewers = append(test.expected_missing_reviewers, nil)
|
||||
}
|
||||
if idx >= len(test.expected_extra_reviewers) {
|
||||
test.expected_extra_reviewers = append(test.expected_extra_reviewers, nil)
|
||||
}
|
||||
if test.pkgTimeline == nil {
|
||||
test.pkgTimeline = reviewsToTimeline(test.pkgReviews)
|
||||
}
|
||||
if test.prjTimeline == nil {
|
||||
test.prjTimeline = reviewsToTimeline(test.prjReviews)
|
||||
}
|
||||
|
||||
slices.Sort(test.expected_extra_reviewers[idx])
|
||||
slices.Sort(test.expected_missing_reviewers[idx])
|
||||
if slices.Compare(missing, test.expected_missing_reviewers[idx]) != 0 {
|
||||
t.Error("Expected missing reviewers for", common.PRtoString(pr.PR), ":", test.expected_missing_reviewers[idx], "but have:", missing)
|
||||
}
|
||||
pr_mock.EXPECT().GetPullRequest("other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "pkgrepo", Owner: &models.User{UserName: "other"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("other", "pkgrepo", int64(1)).Return(test.pkgReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("other", "pkgrepo", int64(1)).Return(test.pkgTimeline, nil)
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: fmt.Sprintf(common.PrPattern, "other", "pkgrepo", 1),
|
||||
User: &models.User{UserName: "bot1"},
|
||||
RequestedReviewers: []*models.User{{UserName: "main_reviewer"}},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 42,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("org", "repo", int64(42)).Return(test.prjTimeline, nil)
|
||||
|
||||
if slices.Compare(extra, test.expected_extra_reviewers[idx]) != 0 {
|
||||
t.Error("Expected reviewers to remove for", common.PRtoString(pr.PR), ":", test.expected_extra_reviewers[idx], "but have:", extra)
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo", gomock.Any()).Return([]string{"pkgmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet("test", pr_mock, "other", "pkgrepo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 2 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
r = test.expectedReviewerCall[1]
|
||||
}
|
||||
slices.Sort(r)
|
||||
for _, reviewer := range r {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, reviewer).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
|
||||
prjgit_tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "PrjMaintainers in prjgit review when not part of pkg set",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "org/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot", "prjmaintainer"}},
|
||||
},
|
||||
}
|
||||
for _, test := range prjgit_tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("org", "repo", int64(1)).Return(nil, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet("test", pr_mock, "org", "repo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 1 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
t.Fatal("only prjgit pr here")
|
||||
}
|
||||
for _, reviewer := range r {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, reviewer).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRMerge(t *testing.T) {
|
||||
t.Skip("FAIL: No PrjGit PR found, missing calls")
|
||||
repoDir := t.TempDir()
|
||||
|
||||
cwd, _ := os.Getwd()
|
||||
@@ -1203,7 +977,7 @@ func TestPRMerge(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
reviewUnrequestMock := mock_common.NewMockGiteaReviewUnrequester(ctl)
|
||||
|
||||
reviewUnrequestMock.EXPECT().UnrequestReview(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil)
|
||||
@@ -1231,7 +1005,6 @@ func TestPRMerge(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPRChanges(t *testing.T) {
|
||||
t.Skip("FAIL: unexpected calls, missing calls")
|
||||
tests := []struct {
|
||||
name string
|
||||
PRs []*models.PullRequest
|
||||
@@ -1262,7 +1035,7 @@ func TestPRChanges(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock_fetcher := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
mock_fetcher := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
mock_fetcher.EXPECT().GetPullRequest("org", "prjgit", int64(42)).Return(test.PrjPRs, nil)
|
||||
for _, pr := range test.PRs {
|
||||
mock_fetcher.EXPECT().GetPullRequest(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index).Return(pr, nil)
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
)
|
||||
|
||||
type Reviewers struct {
|
||||
Prj []string
|
||||
Pkg []string
|
||||
@@ -32,5 +36,10 @@ func ParseReviewers(input []string) *Reviewers {
|
||||
*pkg = append(*pkg, reviewer)
|
||||
}
|
||||
}
|
||||
|
||||
if !slices.Contains(r.Prj, Bot_BuildReview) {
|
||||
r.Prj = append(r.Prj, Bot_BuildReview)
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
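ParseReviewers now always appends Bot_BuildReview to the project reviewer list. A small illustration, with made-up reviewer names and the prefix semantics taken from the TestReviewers expectations below:

```go
r := ParseReviewers([]string{"-prjreviewer", "pkgreviewer", "*both"})
// Expected, per the test table below:
//   r.Prj == []string{"prjreviewer", "both", Bot_BuildReview}
//   r.Pkg == []string{"pkgreviewer", "both"}
```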
||||
@@ -21,14 +21,14 @@ func TestReviewers(t *testing.T) {
|
||||
name: "project and package reviewers",
|
||||
input: []string{"1", "2", "3", "*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"5", "7"},
|
||||
prj: []string{"5", "7", common.Bot_BuildReview},
|
||||
pkg: []string{"1", "2", "3", "5", "6"},
|
||||
},
|
||||
{
|
||||
name: "optional project and package reviewers",
|
||||
input: []string{"~1", "2", "3", "~*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"7"},
|
||||
prj: []string{"7", common.Bot_BuildReview},
|
||||
pkg: []string{"2", "3", "6"},
|
||||
prj_optional: []string{"5"},
|
||||
pkg_optional: []string{"1", "5"},
|
||||
|
||||
@@ -9,14 +9,12 @@ import (
|
||||
)
|
||||
|
||||
type PRReviews struct {
|
||||
Reviews []*models.PullReview
|
||||
RequestedReviewers []string
|
||||
Comments []*models.TimelineComment
|
||||
|
||||
FullTimeline []*models.TimelineComment
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
comments []*models.TimelineComment
|
||||
}
|
||||
|
||||
func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64) (*PRReviews, error) {
|
||||
func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, repo string, no int64) (*PRReviews, error) {
|
||||
timeline, err := rf.GetTimeline(org, repo, no)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -27,14 +25,10 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64
|
||||
return nil, err
|
||||
}
|
||||
|
||||
reviews := make([]*models.PullReview, 0, 10)
|
||||
needNewReviews := []string{}
|
||||
reviews := make([]*models.PullReview, 0, len(reviewers))
|
||||
var comments []*models.TimelineComment
|
||||
|
||||
alreadyHaveUserReview := func(user string) bool {
|
||||
if slices.Contains(needNewReviews, user) {
|
||||
return true
|
||||
}
|
||||
for _, r := range reviews {
|
||||
if r.User != nil && r.User.UserName == user {
|
||||
return true
|
||||
@@ -43,40 +37,32 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64
|
||||
return false
|
||||
}
|
||||
|
||||
LogDebug("FetchingGiteaReviews for", org, repo, no)
|
||||
LogDebug("Number of reviews:", len(rawReviews))
|
||||
LogDebug("Number of items in timeline:", len(timeline))
|
||||
|
||||
cutOffIdx := len(timeline)
|
||||
for idx, item := range timeline {
|
||||
if item.Type == TimelineCommentType_Review || item.Type == TimelineCommentType_ReviewRequested {
|
||||
if item.Type == TimelineCommentType_Review {
|
||||
for _, r := range rawReviews {
|
||||
if r.ID == item.ReviewID {
|
||||
if !alreadyHaveUserReview(r.User.UserName) {
|
||||
if item.Type == TimelineCommentType_Review && idx > cutOffIdx {
|
||||
needNewReviews = append(needNewReviews, r.User.UserName)
|
||||
} else {
|
||||
reviews = append(reviews, r)
|
||||
}
|
||||
reviews = append(reviews, r)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if item.Type == TimelineCommentType_Comment && cutOffIdx > idx {
|
||||
} else if item.Type == TimelineCommentType_Comment {
|
||||
comments = append(comments, item)
|
||||
} else if item.Type == TimelineCommentType_PushPull && cutOffIdx == len(timeline) {
|
||||
LogDebug("cut-off", item.Created, "@", idx)
|
||||
cutOffIdx = idx
|
||||
} else if item.Type == TimelineCommentType_PushPull {
|
||||
LogDebug("cut-off", item.Created)
|
||||
timeline = timeline[0:idx]
|
||||
break
|
||||
} else {
|
||||
LogDebug("Unhandled timeline type:", item.Type)
|
||||
}
|
||||
}
|
||||
LogDebug("num comments:", len(comments), "timeline:", len(reviews))
|
||||
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))
|
||||
|
||||
return &PRReviews{
|
||||
Reviews: reviews,
|
||||
Comments: comments,
|
||||
FullTimeline: timeline,
|
||||
reviews: reviews,
|
||||
reviewers: reviewers,
|
||||
comments: comments,
|
||||
}, nil
|
||||
}
|
||||
|
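With this change the requested-reviewer list is passed into FetchGiteaReviews and kept on PRReviews, instead of being assigned to an exported field afterwards. A hedged in-package sketch of the new call shape; the reviewer names, org/repo and index are illustrative:

```go
reviewers := []string{"pkgmaintainer", "reviewer1"} // whoever is expected to sign off
r, err := FetchGiteaReviews(gitea, reviewers, "org", "pkg", int64(42))
if err != nil {
	return fmt.Errorf("cannot fetch Gitea reviews: %w", err)
}
if !r.IsApproved() {
	LogDebug("still waiting on reviews from:", r.MissingReviews())
}
```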
||||
@@ -95,27 +81,23 @@ func bodyCommandManualMergeOK(body string) bool {
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsManualMergeOK() bool {
|
||||
if r == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, c := range r.Comments {
|
||||
for _, c := range r.comments {
|
||||
if c.Updated != c.Created {
|
||||
continue
|
||||
}
|
||||
LogDebug("comment:", c.User.UserName, c.Body)
|
||||
if slices.Contains(r.RequestedReviewers, c.User.UserName) {
|
||||
if slices.Contains(r.reviewers, c.User.UserName) {
|
||||
if bodyCommandManualMergeOK(c.Body) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, c := range r.Reviews {
|
||||
for _, c := range r.reviews {
|
||||
if c.Updated != c.Submitted {
|
||||
continue
|
||||
}
|
||||
if slices.Contains(r.RequestedReviewers, c.User.UserName) {
|
||||
if slices.Contains(r.reviewers, c.User.UserName) {
|
||||
if bodyCommandManualMergeOK(c.Body) {
|
||||
return true
|
||||
}
|
||||
@@ -126,14 +108,11 @@ func (r *PRReviews) IsManualMergeOK() bool {
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsApproved() bool {
|
||||
if r == nil {
|
||||
return false
|
||||
}
|
||||
goodReview := true
|
||||
|
||||
for _, reviewer := range r.RequestedReviewers {
|
||||
for _, reviewer := range r.reviewers {
|
||||
goodReview = false
|
||||
for _, review := range r.Reviews {
|
||||
for _, review := range r.reviews {
|
||||
if review.User.UserName == reviewer && review.State == ReviewStateApproved && !review.Stale && !review.Dismissed {
|
||||
LogDebug(" -- found review: ", review.User.UserName)
|
||||
goodReview = true
|
||||
@@ -151,11 +130,7 @@ func (r *PRReviews) IsApproved() bool {
|
||||
|
||||
func (r *PRReviews) MissingReviews() []string {
|
||||
missing := []string{}
|
||||
if r == nil {
|
||||
return missing
|
||||
}
|
||||
|
||||
for _, reviewer := range r.RequestedReviewers {
|
||||
for _, reviewer := range r.reviewers {
|
||||
if !r.IsReviewedBy(reviewer) {
|
||||
missing = append(missing, reviewer)
|
||||
}
|
||||
@@ -163,64 +138,45 @@ func (r *PRReviews) MissingReviews() []string {
|
||||
return missing
|
||||
}
|
||||
|
||||
func (r *PRReviews) FindReviewRequester(reviewer string) *models.TimelineComment {
|
||||
if r == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, r := range r.FullTimeline {
|
||||
if r.Type == TimelineCommentType_ReviewRequested && r.Assignee.UserName == reviewer {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
|
||||
if r == nil {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.Reviews {
|
||||
if r.User.UserName == reviewer {
|
||||
isPending := false
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateRequestReview, ReviewStatePending:
|
||||
return true
|
||||
default:
|
||||
case ReviewStateApproved:
|
||||
fallthrough
|
||||
case ReviewStateRequestChanges:
|
||||
return false
|
||||
case ReviewStateRequestReview:
|
||||
fallthrough
|
||||
case ReviewStatePending:
|
||||
isPending = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
return isPending
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedBy(reviewer string) bool {
|
||||
if r == nil {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.Reviews {
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved, ReviewStateRequestChanges:
|
||||
case ReviewStateApproved:
|
||||
return true
|
||||
case ReviewStateRequestChanges:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedByOneOf(reviewers ...string) bool {
|
||||
for _, reviewer := range reviewers {
|
||||
if r.IsReviewedBy(reviewer) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -62,23 +62,11 @@ func TestReviews(t *testing.T) {
{
name: "Two reviewer, one stale and pending",
reviews: []*models.PullReview{
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
},
reviewers: []string{"user1", "user2"},
isApproved: false,
isPendingByTest1: true,
isReviewedByTest1: false,
},
{
name: "Two reviewer, one stale and pending, other done",
reviews: []*models.PullReview{
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}},
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
},
reviewers: []string{"user1", "user2"},
isApproved: false,
isPendingByTest1: true,
isPendingByTest1: false,
isReviewedByTest1: false,
},
{
@@ -151,7 +139,7 @@ func TestReviews(t *testing.T) {
rf.EXPECT().GetTimeline("test", "pr", int64(1)).Return(test.timeline, nil)
rf.EXPECT().GetPullRequestReviews("test", "pr", int64(1)).Return(test.reviews, test.fetchErr)

reviews, err := common.FetchGiteaReviews(rf, "test", "pr", 1)
reviews, err := common.FetchGiteaReviews(rf, test.reviewers, "test", "pr", 1)

if test.fetchErr != nil {
if err != test.fetchErr {
@@ -159,7 +147,6 @@ func TestReviews(t *testing.T) {
}
return
}
reviews.RequestedReviewers = test.reviewers

if r := reviews.IsApproved(); r != test.isApproved {
t.Fatal("Unexpected IsReviewed():", r, "vs. expected", test.isApproved)

@@ -1,10 +1,6 @@
#!/usr/bin/bash

git init -q --bare --object-format=sha256
git config user.email test@example.com
git config user.name Test
export GIT_AUTHOR_DATE=2025-10-27T14:20:07+01:00
export GIT_COMMITTER_DATE=2025-10-27T14:20:07+01:00

# 81aba862107f1e2f5312e165453955485f424612f313d6c2fb1b31fef9f82a14
blobA=$(echo "help" | git hash-object --stdin -w)

common/utils.go
@@ -27,87 +27,10 @@ import (
"regexp"
"slices"
"strings"
"unicode"

"src.opensuse.org/autogits/common/gitea-generated/models"
)

type NewRepos struct {
Repos []struct {
Organization, Repository, Branch string
PackageName string
}
IsMaintainer bool
}

const maintainership_line = "MAINTAINER"

var true_lines []string = []string{"1", "TRUE", "YES", "OK", "T"}

func HasSpace(s string) bool {
return strings.IndexFunc(s, unicode.IsSpace) >= 0
}

func FindNewReposInIssueBody(body string) *NewRepos {
Issues := &NewRepos{}
for _, line := range strings.Split(body, "\n") {
line = strings.TrimSpace(line)
if ul := strings.ToUpper(line); strings.HasPrefix(ul, "MAINTAINER") {
value := ""
if idx := strings.IndexRune(ul, ':'); idx > 0 && len(ul) > idx+2 {
value = ul[idx+1:]
} else if idx := strings.IndexRune(ul, ' '); idx > 0 && len(ul) > idx+2 {
value = ul[idx+1:]
}

if slices.Contains(true_lines, strings.TrimSpace(value)) {
Issues.IsMaintainer = true
}
}
// line = strings.TrimSpace(line)
issue := struct{ Organization, Repository, Branch, PackageName string }{}

branch := strings.Split(line, "#")
repo := strings.Split(branch[0], "/")

if len(branch) == 2 {
issue.Branch = strings.TrimSpace(branch[1])
}
if len(repo) == 2 {
issue.Organization = strings.TrimSpace(repo[0])
issue.Repository = strings.TrimSpace(repo[1])
issue.PackageName = issue.Repository

if idx := strings.Index(strings.ToUpper(issue.Branch), " AS "); idx > 0 && len(issue.Branch) > idx+5 {
issue.PackageName = strings.TrimSpace(issue.Branch[idx+3:])
issue.Branch = strings.TrimSpace(issue.Branch[0:idx])
}

if HasSpace(issue.Organization) || HasSpace(issue.Repository) || HasSpace(issue.PackageName) || HasSpace(issue.Branch) {
continue
}
} else {
continue
}
Issues.Repos = append(Issues.Repos, issue)
//PackageNameIdx := strings.Index(strings.ToUpper(line), " AS ")
//words := strings.Split(line)
}

if len(Issues.Repos) == 0 {
return nil
}
return Issues
}

func IssueToString(issue *models.Issue) string {
if issue == nil {
return "(nil)"
}

return fmt.Sprintf("%s/%s#%d", issue.Repository.Owner, issue.Repository.Name, issue.Index)
}

func SplitLines(str string) []string {
return SplitStringNoEmpty(str, "\n")
}
@@ -245,10 +168,9 @@ func FetchDevelProjects() (DevelProjects, error) {
}

var DevelProjectNotFound = errors.New("Devel project not found")

func (d DevelProjects) GetDevelProject(pkg string) (string, error) {
for _, item := range d {
if item.Package == pkg {
if item.Package == pkg {
return item.Project, nil
}
}
@@ -256,33 +178,3 @@ func (d DevelProjects) GetDevelProject(pkg string) (string, error) {
return "", DevelProjectNotFound
}

var removedBranchNameSuffixes []string = []string{
"-rm",
"-removed",
"-deleted",
}

func findRemovedBranchSuffix(branchName string) string {
branchName = strings.ToLower(branchName)

for _, suffix := range removedBranchNameSuffixes {
if len(suffix) < len(branchName) && strings.HasSuffix(branchName, suffix) {
return suffix
}
}

return ""
}

func IsRemovedBranch(branchName string) bool {
return len(findRemovedBranchSuffix(branchName)) > 0
}

func TrimRemovedBranchSuffix(branchName string) string {
suffix := findRemovedBranchSuffix(branchName)
if len(suffix) > 0 {
return branchName[0 : len(branchName)-len(suffix)]
}

return branchName
}

@@ -1,7 +1,6 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
@@ -166,142 +165,3 @@ func TestRemoteName(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemovedBranchName(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
branchName string
|
||||
isRemoved bool
|
||||
regularName string
|
||||
}{
|
||||
{
|
||||
name: "Empty branch",
|
||||
},
|
||||
{
|
||||
name: "Removed suffix only",
|
||||
branchName: "-rm",
|
||||
isRemoved: false,
|
||||
regularName: "-rm",
|
||||
},
|
||||
{
|
||||
name: "Capital suffix",
|
||||
branchName: "Foo-Rm",
|
||||
isRemoved: true,
|
||||
regularName: "Foo",
|
||||
},
|
||||
{
|
||||
name: "Other suffixes",
|
||||
isRemoved: true,
|
||||
branchName: "Goo-Rm-DeleteD",
|
||||
regularName: "Goo-Rm",
|
||||
},
|
||||
{
|
||||
name: "Other suffixes",
|
||||
isRemoved: true,
|
||||
branchName: "main-REMOVED",
|
||||
regularName: "main",
|
||||
},
|
||||
{
|
||||
name: "Not removed separator",
|
||||
isRemoved: false,
|
||||
branchName: "main;REMOVED",
|
||||
regularName: "main;REMOVED",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
if r := common.IsRemovedBranch(test.branchName); r != test.isRemoved {
|
||||
t.Error("Expecting isRemoved:", test.isRemoved, "but received", r)
|
||||
}
|
||||
|
||||
if tn := common.TrimRemovedBranchSuffix(test.branchName); tn != test.regularName {
|
||||
t.Error("Expected stripped branch name to be:", test.regularName, "but have:", tn)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewPackageIssueParsing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
issues *common.NewRepos
|
||||
}{
|
||||
{
|
||||
name: "Nothing",
|
||||
},
|
||||
{
|
||||
name: "Basic repo",
|
||||
input: "org/repo#branch",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and approval for maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: yes",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "", PackageName: "repo2"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and no maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: NEVER",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "", PackageName: "repo2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "3 repos with comments and maintainership",
|
||||
input: "\n\nsome comments for org1/repo2 are here and more\n\norg1/repo2#master\n org2/repo3#master\n some/repo3#m\nMaintainer ok",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "master", PackageName: "repo2"},
|
||||
{Organization: "org2", Repository: "repo3", Branch: "master", PackageName: "repo3"},
|
||||
{Organization: "some", Repository: "repo3", Branch: "m", PackageName: "repo3"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Invalid repos with spaces",
|
||||
input: "or g/repo#branch\norg/r epo#branch\norg/repo#br anch\norg/repo#branch As foo ++",
|
||||
},
|
||||
{
|
||||
name: "Valid repos with spaces",
|
||||
input: " org / repo # branch",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Package name is not repo name",
|
||||
input: " org / repo # branch as repo++ \nmaintainer true",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo++"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
issue := common.FindNewReposInIssueBody(test.input)
|
||||
if !reflect.DeepEqual(test.issues, issue) {
|
||||
t.Error("Expected", test.issues, "but have", issue)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,42 +58,13 @@ sub ListPackages {
|
||||
return @packages;
|
||||
}
|
||||
|
||||
sub FactoryMd5 {
|
||||
my ($package) = @_;
|
||||
my $out = "";
|
||||
|
||||
if (system("osc ls openSUSE:Factory $package | grep -q build.specials.obscpio") == 0) {
|
||||
system("mkdir _extract") == 0 || die "_extract exists or can't make it. Aborting.";
|
||||
chdir("_extract") || die;
|
||||
system("osc cat openSUSE:Factory $package build.specials.obscpio | cpio -dium 2> /dev/null") == 0 || die;
|
||||
system("rm .* 2> /dev/null");
|
||||
open( my $fh, "find -type f -exec /usr/bin/basename {} \\; | xargs md5sum | awk '{print \$1 FS \$2}' | grep -v d41d8cd98f00b204e9800998ecf8427e |") or die;
|
||||
while ( my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
chdir("..") && system("rm -rf _extract") == 0 || die;
|
||||
}
|
||||
open( my $fh, "osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' |") or die;
|
||||
while (my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
return $out;
|
||||
}
|
||||
|
||||
# Read project from first argument
|
||||
sub Usage {
|
||||
die "Usage: $0 <OBS Project> [org [package]]";
|
||||
die "Usage: $0 <OBS Project> <org>";
|
||||
}
|
||||
|
||||
my $project = shift or Usage();
|
||||
my $org = shift;
|
||||
|
||||
if (not defined($org)) {
|
||||
$org = `osc meta prj $project | grep scmsync | sed -e 's,^.*src.opensuse.org/\\(.*\\)/_ObsPrj.*,\\1,'`;
|
||||
chomp($org);
|
||||
}
|
||||
my $org = shift or Usage();
|
||||
|
||||
my @packages = ListPackages($project);
|
||||
my $pkg = shift;
|
||||
@@ -102,28 +73,13 @@ my $pkg = shift;
|
||||
my @tomove;
|
||||
my @toremove;
|
||||
|
||||
if ( ! -e $org ) {
|
||||
mkdir($org);
|
||||
}
|
||||
chdir($org);
|
||||
print "Verify packages in /pool for $org package in $project\n";
|
||||
|
||||
my $super_user = $ENV{SUPER};
|
||||
if (defined($super_user)) {
|
||||
$super_user = "-G $super_user";
|
||||
} else {
|
||||
$super_user = "";
|
||||
}
|
||||
|
||||
my @missing;
|
||||
print "Verify packages in /pool ...\n";
|
||||
|
||||
# verify that packages in devel project is a fork from pool.
|
||||
for my $pkg ( sort(@packages) ) {
|
||||
my $data = `git obs api /repos/$org/$pkg 2> /dev/null`;
|
||||
if ( length($data) == 0 ) {
|
||||
print "***** Repo missing in $org: $pkg\n";
|
||||
push(@missing, $pkg);
|
||||
next;
|
||||
die "Repo missing in $org: $pkg";
|
||||
}
|
||||
else {
|
||||
my $repo = decode_json($data);
|
||||
@@ -141,15 +97,6 @@ for my $pkg ( sort(@packages) ) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( scalar @missing > 0 ) {
|
||||
for my $pkg (@missing) {
|
||||
my $index = 0;
|
||||
$index++ until $packages[$index] eq $pkg;
|
||||
splice(@packages, $index, 1);
|
||||
}
|
||||
}
|
||||
|
||||
if ( scalar @toremove > 0 ) {
|
||||
print "ABORTING. Need repos removed.\n";
|
||||
print "@toremove\n";
|
||||
@@ -157,15 +104,13 @@ if ( scalar @toremove > 0 ) {
|
||||
}
|
||||
|
||||
if ( scalar @tomove > 0 ) {
|
||||
for my $pkg (@tomove) {
|
||||
system("git obs $super_user api -X POST --data '{\"reparent\": true, \"organization\": \"pool\"}' /repos/$org/$pkg/forks") == 0 and
|
||||
system("git clone gitea\@src.opensuse.org:pool/$pkg") == 0 and
|
||||
system("git -C $pkg checkout -B factory HEAD") == 0 and
|
||||
system("git -C $pkg push origin factory") == 0 and
|
||||
system("git obs $super_user api -X PATCH --data '{\"default_branch\": \"factory\"}' /repos/pool/$pkg") == 0
|
||||
or die "Error in creating a pool repo";
|
||||
system("for i in \$(git -C $pkg for-each-ref --format='%(refname:lstrip=3)' refs/remotes/origin/ | grep -v '\\(^HEAD\$\\|^factory\$\\)'); do git -C $pkg push origin :\$i; done") == 0 or die "failed to cull branches";
|
||||
}
|
||||
|
||||
# for my $pkg (@tomove) {
|
||||
# system("git obs api -X POST --data '{\"new_owner\": \"pool\"}' /repos/$org/$pkg/transfer 2> /dev/null > /dev/null");
|
||||
# }
|
||||
print "ABORTING. Need to move reps.\n";
|
||||
print "Initiated transfer for @tomove\n";
|
||||
exit(1);
|
||||
}
|
||||
|
||||
print "Verify complete.\n";
|
||||
@@ -192,7 +137,6 @@ for my $package ( sort(@packages) ) {
|
||||
or ( push( @tomove, $package ) and die "Can't fetch pool for $package" );
|
||||
|
||||
my @commits = FindFactoryCommit($package);
|
||||
my $Md5Hashes = FactoryMd5($package);
|
||||
my $c;
|
||||
my $match = 0;
|
||||
for my $commit (@commits) {
|
||||
@@ -205,27 +149,15 @@ for my $package ( sort(@packages) ) {
|
||||
system("git -C $package lfs fetch pool $commit") == 0
|
||||
and system("git -C $package checkout -B factory $commit") == 0
|
||||
and system("git -C $package lfs checkout") == 0
|
||||
and chdir($package)) {
|
||||
|
||||
open(my $fh, "|-", "md5sum -c --quiet") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
if ($? >> 8 != 0) {
|
||||
chdir("..") || die;
|
||||
next;
|
||||
}
|
||||
open($fh, "|-", "awk '{print \$2}' | sort | bash -c \"diff <(ls -1 | sort) -\"") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
my $ec = $? >> 8;
|
||||
chdir("..") || die;
|
||||
|
||||
if ($ec == 0) {
|
||||
$c = $commit;
|
||||
$match = 1;
|
||||
last;
|
||||
}
|
||||
and system(
|
||||
"cd $package; osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | md5sum -c --quiet"
|
||||
) == 0
|
||||
)
|
||||
{
|
||||
|
||||
$c = $commit;
|
||||
$match = 1;
|
||||
last;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,7 +165,7 @@ for my $package ( sort(@packages) ) {
|
||||
die "Match not found. Aborting.";
|
||||
}
|
||||
|
||||
system ("git -C $package push -f pool factory");
|
||||
#system ("git -C $package push -f pool factory");
|
||||
print "$package: $c\n";
|
||||
}
|
||||
|
||||
|
||||
@@ -1,181 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
use strict;
|
||||
use warnings;
|
||||
use IPC::Open2;
|
||||
use JSON;
|
||||
|
||||
sub FindFactoryCommit {
|
||||
my ($package) = @_;
|
||||
|
||||
# Execute osc cat and capture output
|
||||
my $osc_cmd = "osc cat openSUSE:Factory $package $package.changes";
|
||||
open( my $osc_fh, "$osc_cmd |" ) or die "Failed to run osc: $!";
|
||||
my $data = do { local $/; <$osc_fh> };
|
||||
close($osc_fh);
|
||||
|
||||
# Calculate size
|
||||
my $size = length($data);
|
||||
|
||||
# Create blob header
|
||||
my $blob = "blob $size\0$data";
|
||||
|
||||
# Open a pipe to openssl to compute the hash
|
||||
my ( $reader, $writer );
|
||||
my $pid = open2( $reader, $writer, "openssl sha256" );
|
||||
|
||||
# Send blob data
|
||||
print $writer $blob;
|
||||
close $writer;
|
||||
|
||||
# Read the hash result and extract it
|
||||
my $hash_line = <$reader>;
|
||||
waitpid( $pid, 0 );
|
||||
my ($hash) = $hash_line =~ /([a-fA-F0-9]{64})/;
|
||||
|
||||
# Run git search command with the hash
|
||||
print("looking for hash: $hash\n");
|
||||
my @hashes;
|
||||
my $git_cmd =
|
||||
"git -C $package rev-list --all pool/HEAD | while read commit; do git -C $package ls-tree \"\$commit\" | grep -q '^100644 blob $hash' && echo \"\$commit\"; done";
|
||||
open( my $git_fh, "$git_cmd |" ) or die "Failed to run git search: $!";
|
||||
while ( my $commit = <$git_fh> ) {
|
||||
chomp $commit;
|
||||
print "Found commit $commit\n";
|
||||
push( @hashes, $commit );
|
||||
}
|
||||
close($git_fh);
|
||||
return @hashes;
|
||||
}
|
||||
|
||||
sub FactoryMd5 {
|
||||
my ($package) = @_;
|
||||
my $out = "";
|
||||
|
||||
if (system("osc ls openSUSE:Factory $package | grep -q build.specials.obscpio") == 0) {
|
||||
system("mkdir _extract") == 0 || die "_extract exists or can't make it. Aborting.";
|
||||
chdir("_extract") || die;
|
||||
system("osc cat openSUSE:Factory $package build.specials.obscpio | cpio -dium 2> /dev/null") == 0 || die;
|
||||
system("rm .* 2> /dev/null");
|
||||
open( my $fh, "find -type f -exec /usr/bin/basename {} \\; | xargs md5sum | awk '{print \$1 FS \$2}' | grep -v d41d8cd98f00b204e9800998ecf8427e |") or die;
|
||||
while ( my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
chdir("..") && system("rm -rf _extract") == 0 || die;
|
||||
}
|
||||
open( my $fh, "osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' |") or die;
|
||||
while (my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
return $out;
|
||||
}
|
||||
|
||||
# Read project from first argument
|
||||
sub Usage {
|
||||
die "Usage: $0 <OBS Project> <package> <repo>";
|
||||
}
|
||||
|
||||
my $project = shift or Usage();
|
||||
my $pkg = shift;
|
||||
my $repo = shift;
|
||||
|
||||
if (not defined($repo)) {
|
||||
Usage();
|
||||
}
|
||||
|
||||
|
||||
my $meta_url = `osc meta pkg $project $pkg | grep scmsync | sed -e 's,\\s*</\\?scmsync>\\s*,,g'`;
|
||||
chomp($meta_url);
|
||||
if ($meta_url ne $repo) {
|
||||
die "meta not equal to repo for $pkg: $meta_url != $repo";
|
||||
}
|
||||
|
||||
my ($org, $repo_path, $branch) = $meta_url =~ m{^https?://src\.opensuse\.org/([^/]+)/([^/#\s]+)(?:.*#(\S+))?};
|
||||
die "Only src.opensuse.org is supported" unless $org;
|
||||
|
||||
if ($org eq "pool") {
|
||||
print "Already a pool package. We are done.\n";
|
||||
exit(0);
|
||||
}
|
||||
|
||||
my @packages = ($pkg) if defined $pkg;
|
||||
|
||||
if ( ! -e $org ) {
|
||||
mkdir($org);
|
||||
}
|
||||
chdir($org);
|
||||
|
||||
my $super_user = $ENV{SUPER};
|
||||
if (defined($super_user)) {
|
||||
$super_user = "-G $super_user";
|
||||
} else {
|
||||
$super_user = "";
|
||||
}
|
||||
|
||||
for my $package ( sort(@packages) ) {
|
||||
print " ----- PROCESSING $package\n";
|
||||
my $url = "https://src.opensuse.org/$org/$repo_path.git";
|
||||
my $push_url = "gitea\@src.opensuse.org:pool/$package.git";
|
||||
if ( not -e $package ) {
|
||||
print("cloning...\n");
|
||||
system("git clone --origin pool $url $package") == 0
|
||||
or die "Can't clone $org/$repo_path";
|
||||
}
|
||||
else {
|
||||
print("adding remote...\n");
|
||||
system("git -C $package remote rm pool > /dev/null");
|
||||
system("git -C $package remote add pool $url") == 0
|
||||
or die "Can't add pool for $package";
|
||||
}
|
||||
system("git -C $package remote set-url pool --push $push_url") == 0
|
||||
or die "Can't add push remote for $package";
|
||||
print("fetching remote...\n");
|
||||
system("git -C $package fetch pool") == 0 or die "Can't fetch pool for $package";
|
||||
|
||||
my @commits = FindFactoryCommit($package);
|
||||
my $Md5Hashes = FactoryMd5($package);
|
||||
my $c;
|
||||
my $match = 0;
|
||||
for my $commit (@commits) {
|
||||
if ( length($commit) != 64 ) {
|
||||
print("Failed to find factory commit. Aborting.");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if (
|
||||
system("git -C $package lfs fetch pool $commit") == 0
|
||||
and system("git -C $package checkout -B factory $commit") == 0
|
||||
and system("git -C $package lfs checkout") == 0
|
||||
and chdir($package)) {
|
||||
|
||||
open(my $fh, "|-", "md5sum -c --quiet") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
if ($? >> 8 != 0) {
|
||||
chdir("..") || die;
|
||||
next;
|
||||
}
|
||||
open($fh, "|-", "awk '{print \$2}' | sort | bash -c \"diff <(ls -1 | sort) -\"") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
my $ec = $? >> 8;
|
||||
chdir("..") || die;
|
||||
|
||||
if ($ec == 0) {
|
||||
$c = $commit;
|
||||
$match = 1;
|
||||
last;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if ( !$match ) {
|
||||
die "Match not found. Aborting.";
|
||||
}
|
||||
|
||||
system ("git -C $package push -f pool factory");
|
||||
print "$package: $c\n";
|
||||
}
|
||||
|
||||
@@ -1,24 +1,16 @@
SystemsManagement
Java:packages
Kernel:firmware
Kernel:kdump
devel:gcc
devel:languages:clojure
devel:languages:erlang
devel:languages:erlang:Factory
devel:languages:hare
devel:languages:javascript
devel:languages:lua
devel:languages:nodejs
devel:languages:perl
devel:languages:python:Factory
devel:languages:python:pytest
devel:openSUSE:Factory
network:chromium
network:dhcp
network:im:whatsapp
network:messaging:xmpp
science:HPC
server:dns
systemsmanagement:cockpit
X11:lxde

@@ -14,11 +14,15 @@ import (
"src.opensuse.org/autogits/common"
)

type Status struct {
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}

type StatusInput struct {
Description string `json:"description"`
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}

func main() {
@@ -55,26 +59,23 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
config, ok := r.Context().Value(configKey).(*Config)

if !ok {
common.LogDebug("Config missing from context")
common.LogError("Config missing from context")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}

header := r.Header.Get("Authorization")
if header == "" {
common.LogDebug("Authorization header not found")
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token_arr := strings.Split(header, " ")
if len(token_arr) != 2 {
common.LogDebug("Authorization header malformed")
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}

if !strings.EqualFold(token_arr[0], "token") {
common.LogDebug("Token not found in Authorization header")
if !strings.EqualFold(token_arr[0], "Bearer") {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
@@ -82,7 +83,6 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
token := token_arr[1]

if !slices.Contains(config.Keys, token) {
common.LogDebug("Provided token is not known")
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
@@ -104,8 +104,13 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
status := Status{
Context: "Build in obs",
State: statusinput.State,
TargetUrl: statusinput.TargetUrl,
}

status_payload, err := json.Marshal(statusinput)
status_payload, err := json.Marshal(status)

if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
@@ -126,8 +131,8 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
return
}

req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authorization", fmt.Sprintf("token %s", ForgeToken))
req.Header.Add("Content-Type", "Content-Type")
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))

resp, err := client.Do(req)

@@ -1,48 +0,0 @@
# gitea_status_proxy

Allows bots without code owner permission to set Gitea's commit status.

## Basic usage

To begin, you need the JSON config and a Gitea token with permissions to the repository you want to write to.

Keys should be randomly generated, e.g. by using openssl: `openssl rand -base64 48`

Generate a JSON config file with the key generated from running the command above, and save it as example.json:

```
{
    "forge_url": "https://src.opensuse.org/api/v1",
    "keys": ["$YOUR_TOKEN_GOES_HERE"]
}
```

### start the proxy:

```
GITEA_TOKEN=YOURTOKEN ./gitea_status_proxy -config example.json
2025/10/30 12:53:18 [I] server up and listening on :3000
```

Now the proxy should be able to accept requests under `localhost:3000/repos/{owner}/{repo}/statuses/{sha}`. The token used when authenticating to the proxy must be in the `keys` list of the configuration JSON file (example.json above).

### example:

On a separate terminal, you can use curl to post a status to the proxy. If the GITEA_TOKEN has permissions on the target repository, this will result in a new status being set for the given commit.

```
curl -X 'POST' \
  'localhost:3000/repos/szarate/test-actions-gitea/statuses/cd5847c92fb65a628bdd6015f96ee7e569e1ad6e4fc487acc149b52e788262f9' \
  -H 'accept: application/json' \
  -H 'Authorization: token $YOUR_TOKEN_GOES_HERE' \
  -H 'Content-Type: application/json' \
  -d '{
    "context": "Proxy test",
    "description": "Status posted from the proxy",
    "state": "success",
    "target_url": "https://src.opensuse.org"
  }'
```

After this you should be able to see the results in the pull request, e.g. from above: https://src.opensuse.org/szarate/test-actions-gitea/pulls/1
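
For completeness, a rough Go equivalent of the curl call above. This is only a sketch: it reuses the illustrative repository, commit SHA and placeholder token from this README, and it assumes the `token <key>` authorization scheme shown in the curl example (the accepted scheme may differ between proxy versions).

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Same illustrative payload as the curl example above.
	payload, _ := json.Marshal(map[string]string{
		"context":     "Proxy test",
		"description": "Status posted from the proxy",
		"state":       "success",
		"target_url":  "https://src.opensuse.org",
	})

	url := "http://localhost:3000/repos/szarate/test-actions-gitea/statuses/" +
		"cd5847c92fb65a628bdd6015f96ee7e569e1ad6e4fc487acc149b52e788262f9"
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Placeholder key from example.json above.
	req.Header.Set("Authorization", "token $YOUR_TOKEN_GOES_HERE")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("proxy responded with:", resp.Status)
}
```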
@@ -1,65 +1,41 @@
Group Review Bot
================

This workaround is mainly needed because Gitea does not track which team member performed a review on behalf of a team.
Areas of responsibility
-----------------------

Main Tasks
----------
1. Is used to handle reviews associated with groups defined in the
   ProjectGit.

   Awaits a comment in the format “@groupreviewbot-name: approve”, then approves the PR with the comment “<user> approved a review on behalf of <groupreviewbot-name>.” (see the sketch after this list)
2. Assumes: workflow-pr needs to associate and define the PR set from
   which the groups.json is read (Base of the PrjGit PR)
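
As a rough illustration of how such a command line is recognized (not the bot's full implementation, though the regular expression mirrors the `acceptRx` pattern from main.go in this PR; the group name is only a placeholder):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Mirrors the accept pattern used by the bot: "@<group>: approve" or "@<group>: LGTM".
var acceptRx = regexp.MustCompile(`^:\s*(LGTM|approved?)`)

// reviewAccepted reports whether any line of a PR comment approves on behalf of the group.
func reviewAccepted(groupName, comment string) bool {
	prefix := "@" + groupName
	for _, line := range strings.Split(comment, "\n") {
		line = strings.TrimSpace(line)
		if strings.HasPrefix(line, prefix) {
			return acceptRx.MatchString(line[len(prefix):])
		}
	}
	return false
}

func main() {
	fmt.Println(reviewAccepted("groupreviewbot-name", "@groupreviewbot-name: approve")) // true
	fmt.Println(reviewAccepted("groupreviewbot-name", "Looks good, but not a command"))  // false
}
```

In the bot itself this check drives the approval comment and the removal of the remaining review requests.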

Target Usage
------------

Projects where policy reviews are required.

Configuration
Configiuration
--------------

The bot is configured via the `ReviewGroups` field in the `workflow.config` file, located in the ProjectGit repository.
Groups are defined in the workflow.config inside the project git. They take following options,

See `ReviewGroups` in the [workflow-pr configuration](../workflow-pr/README.md#config-file).

```json
{
    ...
    "ReviewGroups": [
        {
            "Name": "name of the group user",
            "Reviewers": ["members", "of", "group"],
            "Silent": "(true, false) -- if true, do not explicitly require review requests of group members"
        }
    ],
    ...
    ...
    ReviewGroups: [
        {
            "Name": "name of the group user",
            "Reviewers": ["members", "of", "group"],
            "Silent": (true, false) -- if true, do not explicitly require review requests of group members
        },
    ],
    ...
}
```

Server configuration
--------------------------

**Configuration file:**

| Field | Type | Notes |
| ----- | ----- | ----- |
| root | Array of string | Format **org/repo\#branch** |

Requirements
------------
Gitea token with the following permissions:
- R/W PullRequest
- R/W Notification
- R User

Env Variables
-------------
The following variables can be used and will override command line parameters.

* `AUTOGITS_CONFIG` - config file location
* `AUTOGITS_URL` - Gitea URL
* `AUTOGITS_RABBITURL` - RabbitMQ url
* `AUTOGITS_DEBUG` - when set, debug level logging enabled

Authentication env variables
* `GITEA_TOKEN` - Gitea user token
* `AMQP_USERNAME`, `AMQP_PASSWORD` - username and password for rabbitmq
* Gitea token to:
  + R/W PullRequest
  + R/W Notification
  + R User

@@ -5,7 +5,6 @@ import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
@@ -18,23 +17,20 @@ import (
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type ReviewBot struct {
|
||||
configs common.AutogitConfigs
|
||||
acceptRx *regexp.Regexp
|
||||
rejectRx *regexp.Regexp
|
||||
groupName string
|
||||
gitea common.Gitea
|
||||
var configs common.AutogitConfigs
|
||||
var acceptRx *regexp.Regexp
|
||||
var rejectRx *regexp.Regexp
|
||||
var groupName string
|
||||
|
||||
func InitRegex(newGroupName string) {
|
||||
groupName = newGroupName
|
||||
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
|
||||
rejectRx = regexp.MustCompile("^:\\s*")
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) InitRegex(newGroupName string) {
|
||||
bot.groupName = newGroupName
|
||||
bot.acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
|
||||
bot.rejectRx = regexp.MustCompile("^:\\s*")
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ParseReviewLine(reviewText string) (bool, string) {
|
||||
func ParseReviewLine(reviewText string) (bool, string) {
|
||||
line := strings.TrimSpace(reviewText)
|
||||
groupTextName := "@" + bot.groupName
|
||||
groupTextName := "@" + groupName
|
||||
glen := len(groupTextName)
|
||||
if len(line) < glen || line[0:glen] != groupTextName {
|
||||
return false, line
|
||||
@@ -54,20 +50,20 @@ func (bot *ReviewBot) ParseReviewLine(reviewText string) (bool, string) {
|
||||
return false, line
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ReviewAccepted(reviewText string) bool {
|
||||
func ReviewAccepted(reviewText string) bool {
|
||||
for _, line := range common.SplitStringNoEmpty(reviewText, "\n") {
|
||||
if matched, reviewLine := bot.ParseReviewLine(line); matched {
|
||||
return bot.acceptRx.MatchString(reviewLine)
|
||||
if matched, reviewLine := ParseReviewLine(line); matched {
|
||||
return acceptRx.MatchString(reviewLine)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ReviewRejected(reviewText string) bool {
|
||||
func ReviewRejected(reviewText string) bool {
|
||||
for _, line := range common.SplitStringNoEmpty(reviewText, "\n") {
|
||||
if matched, reviewLine := bot.ParseReviewLine(line); matched {
|
||||
if bot.rejectRx.MatchString(reviewLine) {
|
||||
return !bot.acceptRx.MatchString(reviewLine)
|
||||
if matched, reviewLine := ParseReviewLine(line); matched {
|
||||
if rejectRx.MatchString(reviewLine) {
|
||||
return !acceptRx.MatchString(reviewLine)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -117,10 +113,10 @@ var commentStrings = []string{
|
||||
"change_time_estimate",
|
||||
}*/
|
||||
|
||||
func (bot *ReviewBot) FindAcceptableReviewInTimeline(user string, timeline []*models.TimelineComment, reviews []*models.PullReview) *models.TimelineComment {
|
||||
func FindAcceptableReviewInTimeline(user string, timeline []*models.TimelineComment, reviews []*models.PullReview) *models.TimelineComment {
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User.UserName == user && t.Created == t.Updated {
|
||||
if bot.ReviewAccepted(t.Body) || bot.ReviewRejected(t.Body) {
|
||||
if ReviewAccepted(t.Body) || ReviewRejected(t.Body) {
|
||||
return t
|
||||
}
|
||||
}
|
||||
@@ -129,9 +125,9 @@ func (bot *ReviewBot) FindAcceptableReviewInTimeline(user string, timeline []*mo
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) FindOurLastReviewInTimeline(timeline []*models.TimelineComment) *models.TimelineComment {
|
||||
func FindOurLastReviewInTimeline(timeline []*models.TimelineComment) *models.TimelineComment {
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Review && t.User.UserName == bot.groupName && t.Created == t.Updated {
|
||||
if t.Type == common.TimelineCommentType_Review && t.User.UserName == groupName && t.Created == t.Updated {
|
||||
return t
|
||||
}
|
||||
}
|
||||
@@ -139,13 +135,13 @@ func (bot *ReviewBot) FindOurLastReviewInTimeline(timeline []*models.TimelineCom
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) UnrequestReviews(org, repo string, id int64, users []string) {
|
||||
if err := bot.gitea.UnrequestReview(org, repo, id, users...); err != nil {
|
||||
func UnrequestReviews(gitea common.Gitea, org, repo string, id int64, users []string) {
|
||||
if err := gitea.UnrequestReview(org, repo, id, users...); err != nil {
|
||||
common.LogError("Can't remove reviewrs after a review:", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThread) {
|
||||
func ProcessNotifications(notification *models.NotificationThread, gitea common.Gitea) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
common.LogInfo("panic cought --- recovered")
|
||||
@@ -153,7 +149,7 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
}
|
||||
}()
|
||||
|
||||
rx := regexp.MustCompile(`^/?api/v\d+/repos/(?<org>[_\.a-zA-Z0-9-]+)/(?<project>[_\.a-zA-Z0-9-]+)/(?:issues|pulls)/(?<num>[0-9]+)$`)
|
||||
rx := regexp.MustCompile(`^/?api/v\d+/repos/(?<org>[_a-zA-Z0-9-]+)/(?<project>[_a-zA-Z0-9-]+)/(?:issues|pulls)/(?<num>[0-9]+)$`)
|
||||
subject := notification.Subject
|
||||
u, err := url.Parse(notification.Subject.URL)
|
||||
if err != nil {
|
||||
@@ -172,14 +168,14 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
id, _ := strconv.ParseInt(match[3], 10, 64)
|
||||
|
||||
common.LogInfo("processing:", fmt.Sprintf("%s/%s!%d", org, repo, id))
|
||||
pr, err := bot.gitea.GetPullRequest(org, repo, id)
|
||||
pr, err := gitea.GetPullRequest(org, repo, id)
|
||||
if err != nil {
|
||||
common.LogError(" ** Cannot fetch PR associated with review:", subject.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := bot.ProcessPR(pr); err == nil && !common.IsDryRun {
|
||||
if err := bot.gitea.SetNotificationRead(notification.ID); err != nil {
|
||||
if err := ProcessPR(pr); err == nil && !common.IsDryRun {
|
||||
if err := gitea.SetNotificationRead(notification.ID); err != nil {
|
||||
common.LogDebug(" Cannot set notification as read", err)
|
||||
}
|
||||
} else if err != nil && err != ReviewNotFinished {
|
||||
@@ -189,24 +185,24 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
|
||||
var ReviewNotFinished = fmt.Errorf("Review is not finished")
|
||||
|
||||
func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
func ProcessPR(pr *models.PullRequest) error {
|
||||
org := pr.Base.Repo.Owner.UserName
|
||||
repo := pr.Base.Repo.Name
|
||||
id := pr.Index
|
||||
|
||||
found := false
|
||||
for _, reviewer := range pr.RequestedReviewers {
|
||||
if reviewer != nil && reviewer.UserName == bot.groupName {
|
||||
if reviewer != nil && reviewer.UserName == groupName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
common.LogInfo(" review is not requested for", bot.groupName)
|
||||
common.LogInfo(" review is not requested for", groupName)
|
||||
return nil
|
||||
}
|
||||
|
||||
config := bot.configs.GetPrjGitConfig(org, repo, pr.Base.Name)
|
||||
config := configs.GetPrjGitConfig(org, repo, pr.Base.Name)
|
||||
if config == nil {
|
||||
return fmt.Errorf("Cannot find config for: %s", pr.URL)
|
||||
}
|
||||
@@ -216,17 +212,17 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
reviews, err := bot.gitea.GetPullRequestReviews(org, repo, id)
|
||||
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch reviews for: %v: %w", pr.URL, err)
|
||||
}
|
||||
|
||||
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(bot.gitea, bot.groupName, pr.Head.Sha, org, repo, id)
|
||||
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(gitea, groupName, pr.Head.Sha, org, repo, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch timeline to review. %w", err)
|
||||
}
|
||||
|
||||
groupConfig, err := config.GetReviewGroup(bot.groupName)
|
||||
groupConfig, err := config.GetReviewGroup(groupName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch review group. %w", err)
|
||||
}
|
||||
@@ -237,30 +233,30 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
// pr.Head.Sha
|
||||
|
||||
for _, reviewer := range requestReviewers {
|
||||
if review := bot.FindAcceptableReviewInTimeline(reviewer, timeline, reviews); review != nil {
|
||||
if bot.ReviewAccepted(review.Body) {
|
||||
if review := FindAcceptableReviewInTimeline(reviewer, timeline, reviews); review != nil {
|
||||
if ReviewAccepted(review.Body) {
|
||||
if !common.IsDryRun {
|
||||
text := reviewer + " approved a review on behalf of " + bot.groupName
|
||||
if review := bot.FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := bot.gitea.AddReviewComment(pr, common.ReviewStateApproved, text)
|
||||
text := reviewer + " approved a review on behalf of " + groupName
|
||||
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, text)
|
||||
if err != nil {
|
||||
common.LogError(" -> failed to write approval comment", err)
|
||||
}
|
||||
bot.UnrequestReviews(org, repo, id, requestReviewers)
|
||||
UnrequestReviews(gitea, org, repo, id, requestReviewers)
|
||||
}
|
||||
}
|
||||
common.LogInfo(" -> approved by", reviewer)
|
||||
common.LogInfo(" review at", review.Created)
|
||||
return nil
|
||||
} else if bot.ReviewRejected(review.Body) {
|
||||
} else if ReviewRejected(review.Body) {
|
||||
if !common.IsDryRun {
|
||||
text := reviewer + " requested changes on behalf of " + bot.groupName + ". See " + review.HTMLURL
|
||||
if review := bot.FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := bot.gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, text)
|
||||
text := reviewer + " requested changes on behalf of " + groupName + ". See " + review.HTMLURL
|
||||
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Changes requested. See review by: "+reviewer)
|
||||
if err != nil {
|
||||
common.LogError(" -> failed to write rejecting comment", err)
|
||||
}
|
||||
bot.UnrequestReviews(org, repo, id, requestReviewers)
|
||||
UnrequestReviews(gitea, org, repo, id, requestReviewers)
|
||||
}
|
||||
}
|
||||
common.LogInfo(" -> declined by", reviewer)
|
||||
@@ -274,7 +270,7 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
if !groupConfig.Silent && len(requestReviewers) > 0 {
|
||||
common.LogDebug(" Requesting reviews for:", requestReviewers)
|
||||
if !common.IsDryRun {
|
||||
if _, err := bot.gitea.RequestReviews(pr, requestReviewers...); err != nil {
|
||||
if _, err := gitea.RequestReviews(pr, requestReviewers...); err != nil {
|
||||
common.LogDebug(" -> err:", err)
|
||||
}
|
||||
} else {
|
||||
@@ -287,40 +283,42 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
// add a helpful comment, if not yet added
|
||||
found_help_comment := false
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User != nil && t.User.UserName == bot.groupName {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User != nil && t.User.UserName == groupName {
|
||||
found_help_comment = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found_help_comment && !common.IsDryRun {
|
||||
helpComment := fmt.Sprintln("Review by", bot.groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
|
||||
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
|
||||
"Do **not** use standard review interface to review on behalf of the group.\n"+
|
||||
"To accept the review on behalf of the group, create the following comment: `@"+bot.groupName+": approve`.\n"+
|
||||
"To request changes on behalf of the group, create the following comment: `@"+bot.groupName+": decline` followed with lines justifying the decision.\n"+
|
||||
"To accept the review on behalf of the group, create the following comment: `@"+groupName+": approve`.\n"+
|
||||
"To request changes on behalf of the group, create the following comment: `@"+groupName+": decline` followed with lines justifying the decision.\n"+
|
||||
"Future edits of the comments are ignored, a new comment is required to change the review state.")
|
||||
if slices.Contains(groupConfig.Reviewers, pr.User.UserName) {
|
||||
helpComment = helpComment + "\n\n" +
|
||||
"Submitter is member of this review group, hence they are excluded from being one of the reviewers here"
|
||||
}
|
||||
bot.gitea.AddComment(pr, helpComment)
|
||||
gitea.AddComment(pr, helpComment)
|
||||
}
|
||||
|
||||
return ReviewNotFinished
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) PeriodReviewCheck() {
|
||||
notifications, err := bot.gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
|
||||
func PeriodReviewCheck() {
|
||||
notifications, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
|
||||
if err != nil {
|
||||
common.LogError(" Error fetching unread notifications: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, notification := range notifications {
|
||||
bot.ProcessNotifications(notification)
|
||||
ProcessNotifications(notification, gitea)
|
||||
}
|
||||
}
|
||||
|
||||
var gitea common.Gitea
|
||||
|
||||
func main() {
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance used for reviews")
|
||||
rabbitMqHost := flag.String("rabbit-url", "amqps://rabbit.opensuse.org", "RabbitMQ instance where Gitea webhook notifications are sent")
|
||||
@@ -330,24 +328,6 @@ func main() {
|
||||
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry run, no effect. For debugging")
|
||||
flag.Parse()
|
||||
|
||||
if err := common.SetLoggingLevelFromString(*logging); err != nil {
|
||||
common.LogError(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if cf := os.Getenv("AUTOGITS_CONFIG"); len(cf) > 0 {
|
||||
*configFile = cf
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_URL"); len(url) > 0 {
|
||||
*giteaUrl = url
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_RABBITURL"); len(url) > 0 {
|
||||
*rabbitMqHost = url
|
||||
}
|
||||
if debug := os.Getenv("AUTOGITS_DEBUG"); len(debug) > 0 {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
}
|
||||
|
||||
args := flag.Args()
|
||||
if len(args) != 1 {
|
||||
log.Println(" syntax:")
|
||||
@@ -356,7 +336,7 @@ func main() {
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
targetGroupName := args[0]
|
||||
groupName = args[0]
|
||||
|
||||
if *configFile == "" {
|
||||
common.LogError("Missing config file")
|
||||
@@ -379,35 +359,36 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
giteaTransport := common.AllocateGiteaTransport(*giteaUrl)
|
||||
configs, err := common.ResolveWorkflowConfigs(giteaTransport, configData)
|
||||
gitea = common.AllocateGiteaTransport(*giteaUrl)
|
||||
configs, err = common.ResolveWorkflowConfigs(gitea, configData)
|
||||
if err != nil {
|
||||
common.LogError("Cannot parse workflow configs:", err)
|
||||
return
|
||||
}
|
||||
|
||||
reviewer, err := giteaTransport.GetCurrentUser()
|
||||
reviewer, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
common.LogError("Cannot fetch review user:", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := common.SetLoggingLevelFromString(*logging); err != nil {
|
||||
common.LogError(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if *interval < 1 {
|
||||
*interval = 1
|
||||
}
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: giteaTransport,
|
||||
configs: configs,
|
||||
}
|
||||
bot.InitRegex(targetGroupName)
|
||||
InitRegex(groupName)
|
||||
|
||||
common.LogInfo(" ** processing group reviews for group:", bot.groupName)
|
||||
common.LogInfo(" ** processing group reviews for group:", groupName)
|
||||
common.LogInfo(" ** username in Gitea:", reviewer.UserName)
|
||||
common.LogInfo(" ** polling interval:", *interval, "min")
|
||||
common.LogInfo(" ** connecting to RabbitMQ:", *rabbitMqHost)
|
||||
|
||||
if bot.groupName != reviewer.UserName {
|
||||
if groupName != reviewer.UserName {
|
||||
common.LogError(" ***** Reviewer does not match group name. Aborting. *****")
|
||||
return
|
||||
}
|
||||
@@ -419,13 +400,10 @@ func main() {
|
||||
}
|
||||
|
||||
config_update := ConfigUpdatePush{
|
||||
bot: bot,
|
||||
config_modified: make(chan *common.AutogitConfig),
|
||||
}
|
||||
|
||||
process_issue_pr := IssueCommentProcessor{
|
||||
bot: bot,
|
||||
}
|
||||
process_issue_pr := IssueCommentProcessor{}
|
||||
|
||||
configUpdates := &common.RabbitMQGiteaEventsProcessor{
|
||||
Orgs: []string{},
|
||||
@@ -435,7 +413,7 @@ func main() {
|
||||
},
|
||||
}
|
||||
configUpdates.Connection().RabbitURL = u
|
||||
for _, c := range bot.configs {
|
||||
for _, c := range configs {
|
||||
if org, _, _ := c.GetPrjGit(); !slices.Contains(configUpdates.Orgs, org) {
|
||||
configUpdates.Orgs = append(configUpdates.Orgs, org)
|
||||
}
|
||||
@@ -448,17 +426,17 @@ func main() {
|
||||
select {
|
||||
case configTouched, ok := <-config_update.config_modified:
|
||||
if ok {
|
||||
for idx, c := range bot.configs {
|
||||
for idx, c := range configs {
|
||||
if c == configTouched {
|
||||
org, repo, branch := c.GetPrjGit()
|
||||
prj := fmt.Sprintf("%s/%s#%s", org, repo, branch)
|
||||
common.LogInfo("Detected config update for", prj)
|
||||
|
||||
new_config, err := common.ReadWorkflowConfig(bot.gitea, prj)
|
||||
new_config, err := common.ReadWorkflowConfig(gitea, prj)
|
||||
if err != nil {
|
||||
common.LogError("Failed parsing Project config for", prj, err)
|
||||
} else {
|
||||
bot.configs[idx] = new_config
|
||||
configs[idx] = new_config
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -468,7 +446,7 @@ func main() {
|
||||
}
|
||||
}
|
||||
|
||||
bot.PeriodReviewCheck()
|
||||
PeriodReviewCheck()
|
||||
time.Sleep(time.Duration(*interval * int64(time.Minute)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,359 +1,6 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestProcessPR(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
prIndex := int64(1)
|
||||
headSha := "abcdef123456"
|
||||
|
||||
pr := &models.PullRequest{
|
||||
Index: prIndex,
|
||||
URL: "http://gitea/pr/1",
|
||||
State: "open",
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{
|
||||
UserName: org,
|
||||
},
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: headSha,
|
||||
},
|
||||
User: &models.User{
|
||||
UserName: "submitter",
|
||||
},
|
||||
RequestedReviewers: []*models.User{
|
||||
{UserName: groupName},
|
||||
},
|
||||
}
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{
|
||||
{
|
||||
Name: groupName,
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
},
|
||||
},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
|
||||
t.Run("Review not requested for group", func(t *testing.T) {
|
||||
prNoRequest := *pr
|
||||
prNoRequest.RequestedReviewers = nil
|
||||
err := bot.ProcessPR(&prNoRequest)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("PR is closed", func(t *testing.T) {
|
||||
prClosed := *pr
|
||||
prClosed.State = "closed"
|
||||
err := bot.ProcessPR(&prClosed)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Successful Approval", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
// reviewer1 approved in timeline
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
expectedText := "reviewer1 approved a review on behalf of " + groupName
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateApproved, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Dry Run - No actions taken", func(t *testing.T) {
|
||||
common.IsDryRun = true
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// No AddReviewComment or UnrequestReview should be called
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Approval already exists - No new comment", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
approvalText := "reviewer1 approved a review on behalf of " + groupName
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Review,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: approvalText,
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// No AddReviewComment, UnrequestReview, or AddComment should be called
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Rejection already exists - No new comment", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
rejectionText := "reviewer1 requested changes on behalf of " + groupName + ". See http://gitea/comment/123"
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Review,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: rejectionText,
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": decline",
|
||||
HTMLURL: "http://gitea/comment/123",
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Pending review - Help comment already exists", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Some help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// It will try to request reviews
|
||||
mockGitea.EXPECT().RequestReviews(pr, "reviewer1", "reviewer2").Return(nil, nil)
|
||||
|
||||
// AddComment should NOT be called because bot already has a comment in timeline
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Submitter is group member - Excluded from review request", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
prSubmitterMember := *pr
|
||||
prSubmitterMember.User = &models.User{UserName: "reviewer1"}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(nil, nil)
|
||||
mockGitea.EXPECT().RequestReviews(&prSubmitterMember, "reviewer2").Return(nil, nil)
|
||||
mockGitea.EXPECT().AddComment(&prSubmitterMember, gomock.Any()).Return(nil)
|
||||
err := bot.ProcessPR(&prSubmitterMember)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Successful Rejection", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer2"},
|
||||
Body: "@" + groupName + ": decline",
|
||||
HTMLURL: "http://gitea/comment/999",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
expectedText := "reviewer2 requested changes on behalf of " + groupName + ". See http://gitea/comment/999"
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateRequestChanges, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Config not found", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{}
|
||||
err := bot.ProcessPR(pr)
|
||||
if err == nil {
|
||||
t.Error("Expected error when config is missing, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequestReviews", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, fmt.Errorf("gitea error"))
|
||||
err := bot.ProcessPR(pr)
|
||||
if err == nil {
|
||||
t.Error("Expected error from gitea, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestProcessNotifications(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
prIndex := int64(123)
|
||||
notificationID := int64(456)
|
||||
|
||||
notification := &models.NotificationThread{
|
||||
ID: notificationID,
|
||||
Subject: &models.NotificationSubject{
|
||||
URL: fmt.Sprintf("http://gitea/api/v1/repos/%s/%s/pulls/%d", org, repo, prIndex),
|
||||
},
|
||||
}
|
||||
|
||||
t.Run("Notification Success", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
pr := &models.PullRequest{
|
||||
Index: prIndex,
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "headsha",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{{UserName: groupName}},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, prIndex).Return(pr, nil)
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{{Name: groupName, Reviewers: []string{"r1"}}},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "r1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
expectedText := "r1 approved a review on behalf of " + groupName
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateApproved, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
|
||||
mockGitea.EXPECT().SetNotificationRead(notificationID).Return(nil)
|
||||
|
||||
bot.ProcessNotifications(notification)
|
||||
|
||||
})
|
||||
|
||||
t.Run("Invalid Notification URL", func(t *testing.T) {
|
||||
badNotification := &models.NotificationThread{
|
||||
Subject: &models.NotificationSubject{
|
||||
URL: "http://gitea/invalid/url",
|
||||
},
|
||||
}
|
||||
bot.ProcessNotifications(badNotification)
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequest", func(t *testing.T) {
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, prIndex).Return(nil, fmt.Errorf("gitea error"))
|
||||
bot.ProcessNotifications(notification)
|
||||
})
|
||||
}
|
||||
import "testing"
|
||||
|
||||
func TestReviewApprovalCheck(t *testing.T) {
|
||||
tests := []struct {
|
||||
@@ -413,78 +60,16 @@ func TestReviewApprovalCheck(t *testing.T) {
|
||||
InString: "@group2: disapprove",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "Whitespace before colon",
|
||||
GroupName: "group",
|
||||
InString: "@group : LGTM",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "No whitespace after colon",
|
||||
GroupName: "group",
|
||||
InString: "@group:LGTM",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Leading and trailing whitespace on line",
|
||||
GroupName: "group",
|
||||
InString: " @group: LGTM ",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Approved on second line",
|
||||
GroupName: "group",
|
||||
InString: "Random noise\n@group: approved",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Multiple group mentions, first wins",
|
||||
GroupName: "group",
|
||||
InString: "@group: decline\n@group: approve",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Approved on second line",
|
||||
GroupName: "group",
|
||||
InString: "noise\n@group: approve\nmore noise",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Not at start of line (even with whitespace)",
|
||||
GroupName: "group",
|
||||
InString: "Hello @group: approve",
|
||||
Approved: false,
|
||||
},
|
||||
{
|
||||
Name: "Rejecting with reason",
|
||||
GroupName: "group",
|
||||
InString: "@group: decline because of X, Y and Z",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "No colon after group",
|
||||
GroupName: "group",
|
||||
InString: "@group LGTM",
|
||||
Approved: false,
|
||||
Rejected: false,
|
||||
},
|
||||
{
|
||||
Name: "Invalid char after group",
|
||||
GroupName: "group",
|
||||
InString: "@group! LGTM",
|
||||
Approved: false,
|
||||
Rejected: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.Name, func(t *testing.T) {
|
||||
bot := &ReviewBot{}
|
||||
bot.InitRegex(test.GroupName)
|
||||
InitRegex(test.GroupName)
|
||||
|
||||
if r := bot.ReviewAccepted(test.InString); r != test.Approved {
|
||||
if r := ReviewAccepted(test.InString); r != test.Approved {
|
||||
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
|
||||
}
|
||||
if r := bot.ReviewRejected(test.InString); r != test.Rejected {
|
||||
if r := ReviewRejected(test.InString); r != test.Rejected {
|
||||
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
|
||||
}
|
||||
})
|
||||
|
||||
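The test table above pins down the accepted command syntax: `@<group>: <keyword>` must start a line (leading whitespace allowed), whitespace may surround the colon, and a bare mention without a colon is ignored. The bot's actual pattern is built by `InitRegex` and is not shown in this hunk; the sketch below is only a pattern consistent with the listed cases, with the keyword sets inferred from the test data rather than taken from the bot's source.

```go
package main

import (
	"fmt"
	"regexp"
)

// Sketch only: patterns consistent with the TestReviewApprovalCheck cases above.
// Keyword lists are inferred from the test table, not from ReviewBot.InitRegex.
func buildReviewPatterns(group string) (approve, reject *regexp.Regexp) {
	// Mention must start a line (leading whitespace allowed), whitespace may
	// surround the colon, matching is case-insensitive and multiline.
	prefix := `(?mi)^[ \t]*@` + regexp.QuoteMeta(group) + `[ \t]*:[ \t]*`
	approve = regexp.MustCompile(prefix + `(?:approved?|lgtm)\b`)
	reject = regexp.MustCompile(prefix + `(?:declined?|disapproved?)\b`)
	// "First mention wins" is decided by the caller, e.g. by comparing the
	// earliest match offset of each pattern.
	return approve, reject
}

func main() {
	approve, reject := buildReviewPatterns("group")
	fmt.Println(approve.MatchString(" @group : LGTM "))             // true
	fmt.Println(reject.MatchString("@group: decline because of X")) // true
	fmt.Println(approve.MatchString("Hello @group: approve"))       // false: not at line start
}
```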
@@ -7,9 +7,7 @@ import (
	"src.opensuse.org/autogits/common"
)

type IssueCommentProcessor struct {
	bot *ReviewBot
}
type IssueCommentProcessor struct{}

func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
	if req.Type != common.RequestType_IssueComment {
@@ -21,15 +19,14 @@ func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
	repo := data.Repository.Name
	index := int64(data.Issue.Number)

	pr, err := s.bot.gitea.GetPullRequest(org, repo, index)
	pr, err := gitea.GetPullRequest(org, repo, index)
	if err != nil {
		return fmt.Errorf("Failed to fetch PullRequest from event: %s/%s!%d Error: %w", org, repo, index, err)
	}
	return s.bot.ProcessPR(pr)
	return ProcessPR(pr)
}

type ConfigUpdatePush struct {
	bot             *ReviewBot
	config_modified chan *common.AutogitConfig
}

@@ -49,7 +46,7 @@ func (s *ConfigUpdatePush) ProcessFunc(req *common.Request) error {
	}
	branch := data.Ref[len(branch_ref):]

	c := s.bot.configs.GetPrjGitConfig(org, repo, branch)
	c := configs.GetPrjGitConfig(org, repo, branch)
	if c == nil {
		return nil
	}
@@ -67,7 +64,7 @@ func (s *ConfigUpdatePush) ProcessFunc(req *common.Request) error {
	}

	if modified_config {
		for _, config := range s.bot.configs {
		for _, config := range configs {
			if o, r, _ := config.GetPrjGit(); o == org && r == repo {
				s.config_modified <- config
			}

@@ -1,203 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestIssueCommentProcessor(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
processor := &IssueCommentProcessor{bot: bot}
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
index := 123
|
||||
|
||||
event := &common.IssueCommentWebhookEvent{
|
||||
Repository: &common.Repository{
|
||||
Name: repo,
|
||||
Owner: &common.Organization{
|
||||
Username: org,
|
||||
},
|
||||
},
|
||||
Issue: &common.IssueDetail{
|
||||
Number: index,
|
||||
},
|
||||
}
|
||||
|
||||
req := &common.Request{
|
||||
Type: common.RequestType_IssueComment,
|
||||
Data: event,
|
||||
}
|
||||
|
||||
t.Run("Successful Processing", func(t *testing.T) {
|
||||
pr := &models.PullRequest{
|
||||
Index: int64(index),
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "headsha",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{{UserName: groupName}},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, int64(index)).Return(pr, nil)
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{{Name: groupName, Reviewers: []string{"r1"}}},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, int64(index)).Return(nil, nil)
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, int64(index)).Return(nil, nil)
|
||||
mockGitea.EXPECT().RequestReviews(pr, "r1").Return(nil, nil)
|
||||
mockGitea.EXPECT().AddComment(pr, gomock.Any()).Return(nil)
|
||||
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequest", func(t *testing.T) {
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, int64(index)).Return(nil, fmt.Errorf("gitea error"))
|
||||
err := processor.ProcessFunc(req)
|
||||
if err == nil {
|
||||
t.Error("Expected error, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Wrong Request Type", func(t *testing.T) {
|
||||
wrongReq := &common.Request{Type: common.RequestType_Push}
|
||||
err := processor.ProcessFunc(wrongReq)
|
||||
if err == nil {
|
||||
t.Error("Expected error for wrong request type, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestConfigUpdatePush(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
groupName := "testgroup"
|
||||
bot := &ReviewBot{
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
configChan := make(chan *common.AutogitConfig, 1)
|
||||
processor := &ConfigUpdatePush{
|
||||
bot: bot,
|
||||
config_modified: configChan,
|
||||
}
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
branch := "main"
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#" + branch,
|
||||
Organization: org,
|
||||
Branch: branch,
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
|
||||
event := &common.PushWebhookEvent{
|
||||
Ref: "refs/heads/" + branch,
|
||||
Repository: &common.Repository{
|
||||
Name: repo,
|
||||
Owner: &common.Organization{
|
||||
Username: org,
|
||||
},
|
||||
},
|
||||
Commits: []common.Commit{
|
||||
{
|
||||
Modified: []string{common.ProjectConfigFile},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
req := &common.Request{
|
||||
Type: common.RequestType_Push,
|
||||
Data: event,
|
||||
}
|
||||
|
||||
t.Run("Config Modified", func(t *testing.T) {
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
|
||||
select {
|
||||
case modified := <-configChan:
|
||||
if modified != prjConfig {
|
||||
t.Errorf("Expected modified config to be %v, got %v", prjConfig, modified)
|
||||
}
|
||||
default:
|
||||
t.Error("Expected config modification signal, but none received")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("No Config Modified", func(t *testing.T) {
|
||||
noConfigEvent := *event
|
||||
noConfigEvent.Commits = []common.Commit{{Modified: []string{"README.md"}}}
|
||||
noConfigReq := &common.Request{Type: common.RequestType_Push, Data: &noConfigEvent}
|
||||
|
||||
err := processor.ProcessFunc(noConfigReq)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
|
||||
select {
|
||||
case <-configChan:
|
||||
t.Error("Did not expect config modification signal")
|
||||
default:
|
||||
// Success
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Wrong Branch Ref", func(t *testing.T) {
|
||||
wrongBranchEvent := *event
|
||||
wrongBranchEvent.Ref = "refs/tags/v1.0"
|
||||
wrongBranchReq := &common.Request{Type: common.RequestType_Push, Data: &wrongBranchEvent}
|
||||
|
||||
err := processor.ProcessFunc(wrongBranchReq)
|
||||
if err == nil {
|
||||
t.Error("Expected error for wrong branch ref, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Config Not Found", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{}
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error even if config not found, got %v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -4,15 +4,11 @@ OBS Staging Bot
Build a PR against a ProjectGit, if review is requested.


Main Tasks
----------
Areas of Responsibility
-----------------------

* A build in OBS is initiated when a review for this bot is requested.
* The overall build status is reported:
  * Build successful
  * Build failed
* It checks the build status only for the involved packages compared to the last state of the project for all architectures and all flavors.
* It adds an SVG with detailed build status.
* Monitors Notification API in Gitea for review requests
* Reviews Package build results in OBS for all changed packages in ProjectGit PR


Target Usage
@@ -25,46 +21,22 @@ Configuration File
------------------

Bot reads `staging.config` from the project git or the PR to the project git.
It's a JSON file with the following syntax:
It's a JSON file with the following syntax

```json
```
{
  "ObsProject": "SUSE:SLFO:1.2",
  "StagingProject": "SUSE:SLFO:1.2:PullRequest",
  "QA": [
    {
      "Name": "SLES",
      "Origin": "SUSE:SLFO:Products:SLES:16.0"
    }
  ]
  "ObsProject": "home:foo:project",
  "StagingProject": "home:foo:project:staging",
  "QA": [
    {
      "Name": "ProjectBuild",
      "Origin": "home:foo:product:images"
    }
  ]
}
```

| Field name | Details | Mandatory | Type | Allowed Values | Default |
| ----- | ----- | ----- | ----- | ----- | ----- |
| *ObsProject* | Product OBS project. Builds in this project will be used to compare to builds based on sources from the PR. | yes | string | `[a-zA-Z0-9-_:]+` | |
| *StagingProject* | Used both as base project and prefix for all OBS staging projects. Upon being added as a reviewer to a PrjGit PR, this bot automatically generates an OBS project named *StagingProject:<PR_Number>*. It must be a sub-project of the *ObsProject*. | yes | string | `[a-zA-Z0-9-_:]+` | |
| *QA* | Crucial for generating a product build (such as an ISO or FTP tree) that incorporates the packages. | no | array of objects | | |
| *QA > Name* | Suffix for the QA OBS staging project. The project is named *StagingProject:<PR_Number>:Name*. | no | string | | |
| *QA > Origin* | OBS reference project | no | string | | |


Details
-------

* **OBS staging projects are deleted** when the relative PrjGit PR is closed or merged.

* **PrjGit PR - staging project**
  * The OBS staging project utilizes an **scmsync** tag, configured with the `onlybuild` flag, to exclusively build packages associated with this specific PrjGit PR.
  * The **build config** is inherited from the PrjGit PR config file (even if unchanged).
  * The **project meta** creates a standard repository following the StagingProject as a project path.
  * The base *StagingProject* has the macro **FromScratch:** set in its config, which prevents inheriting the configuration from the included project paths.
  * The bot copies the project maintainers from *StagingProject* to the specific staging project (*StagingProject:<PR_Number>*).
  * The bot reports “Build successful” only if the build is successful for all repositories and all architectures.

* **PrjGit PR - QA staging project**
  * The QA staging project is meant for building the product; the relative build config is inherited from the `QA > Origin` project.
  * In this case, the **scmsync** tag is inherited from the `QA > Origin` project.


* ObsProject: (**required**) Project where the base project is built. Builds in this project will be used to compare to builds based on sources from the PR
* StagingProject: template project that will be used as template for the staging project. Omitting this will use the ObsProject repositories to create the staging. Staging project will be created under the template, or in the bot's home directory if not specified.
* QA: set of projects to build on top of the binaries built in staging.


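As an illustration of the configuration format and naming rules above, the JSON could be modelled roughly as below. This is a sketch only: the bot's real type is `common.StagingConfig` (used by `CreateQASubProject` in the next hunk) and its exact definition is not shown here; the struct fields simply mirror the documented JSON keys, and the PR number is a made-up example.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Sketch of the documented staging.config keys; not the bot's actual type.
type QAEntry struct {
	Name   string // suffix of the QA staging project: StagingProject:<PR_Number>:Name
	Origin string // OBS project the QA build config is inherited from
}

type StagingConfig struct {
	ObsProject     string    // baseline OBS project used for build comparison
	StagingProject string    // base project and prefix of generated staging projects
	QA             []QAEntry // optional product/QA builds on top of the staging results
}

func main() {
	data, err := os.ReadFile("staging.config")
	if err != nil {
		panic(err)
	}
	var cfg StagingConfig
	if err := json.Unmarshal(data, &cfg); err != nil {
		panic(err)
	}
	// Naming rules described above, shown for PR #42 (illustrative number).
	prStaging := fmt.Sprintf("%s:%d", cfg.StagingProject, 42)
	fmt.Println("staging project:", prStaging)
	for _, qa := range cfg.QA {
		fmt.Println("QA project:", prStaging+":"+qa.Name, "based on", qa.Origin)
	}
}
```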
@@ -386,28 +386,6 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
}
|
||||
// patch baseMeta to become the new project
|
||||
templateMeta.Name = stagingProject + ":" + subProjectName
|
||||
// freeze tag for now
|
||||
if len(templateMeta.ScmSync) > 0 {
|
||||
repository, err := url.Parse(templateMeta.ScmSync)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
common.LogDebug("getting data for ", repository.EscapedPath())
|
||||
split := strings.Split(repository.EscapedPath(), "/")
|
||||
org, repo := split[1], split[2]
|
||||
|
||||
common.LogDebug("getting commit for ", org, " repo ", repo, " fragment ", repository.Fragment)
|
||||
branch, err := gitea.GetCommit(org, repo, repository.Fragment)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// set expanded commit url
|
||||
repository.Fragment = branch.SHA
|
||||
templateMeta.ScmSync = repository.String()
|
||||
common.LogDebug("Setting scmsync url to ", templateMeta.ScmSync)
|
||||
}
|
||||
// Cleanup ReleaseTarget and modify affected path entries
|
||||
for idx, r := range templateMeta.Repositories {
|
||||
templateMeta.Repositories[idx].ReleaseTargets = nil
|
||||
@@ -1066,7 +1044,6 @@ func main() {
|
||||
ObsWebHost = ObsWebHostFromApiHost(*obsApiHost)
|
||||
}
|
||||
|
||||
common.LogDebug("OBS Gitea Host:", GiteaUrl)
|
||||
common.LogDebug("OBS Web Host:", ObsWebHost)
|
||||
common.LogDebug("OBS API Host:", *obsApiHost)
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
OBS Status Service
==================

Reports build status of OBS service as an easy-to-produce SVG. Repository
results (build results) are cached for 10 seconds and the repository listing
for the OBS instance is cached for 5 minutes -- new repositories take up to
5 minutes to be visible.
Reports build status of OBS service as an easy-to-produce SVG

Requests for individual build results:

@@ -20,31 +17,19 @@ Get requests for / will also return 404 statu normally. If the Backend redis
server is not available, it will return 500


By default, SVG output is generated, suitable for inclusion. But JSON and XML
output is possible by setting `Accept:` request header

| Accept Request Header | Output format
|------------------------|---------------------
|                         | SVG image
| application/json        | JSON data
| application/obs+xml     | XML output


Areas of Responsibility
-----------------------

* Fetch and cache internal data from OBS and present it in usable format:
  + Generate SVG output for specific OBS project or package
  + Generate JSON/XML output for automated processing
* Low-overhead
* Monitors RabbitMQ interface for notification of OBS package and project status
* Produces SVG output based on GET request
* Cache results (sqlite) and periodically update results from OBS (in case messages are missing)


Target Usage
------------

* inside README.md of package git or project git
* README.md of package git or project git
* comment section of a Gitea PR
* automated build result processing

Running
-------
@@ -57,4 +42,3 @@ Default parameters can be changed by env variables
| `OBS_STATUS_SERVICE_LISTEN` | [::1]:8080 | Listening address and port
| `OBS_STATUS_SERVICE_CERT`   | /run/obs-status-service.pem | Location of certificate file for service
| `OBS_STATUS_SERVICE_KEY`    | /run/obs-status-service.pem | Location of key file for service
| `REDIS`                     | | OBS's Redis instance URL

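To illustrate the `Accept:` header table and the `GET /status/{Project}` route described in this README, a small client might request one project's status as JSON instead of the default SVG. The host and project name below are placeholders.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder host; the route shape follows GET /status/{Project}.
	url := "https://obs-status.example.org/status/openSUSE:Factory"
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	// Omit the header for SVG output, or use "application/obs+xml" for XML.
	req.Header.Set("Accept", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```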
Binary file not shown.
Binary file not shown.
@@ -20,7 +20,6 @@ package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
"flag"
|
||||
"fmt"
|
||||
"html"
|
||||
@@ -206,17 +205,6 @@ func WriteJson(data any, res http.ResponseWriter) {
|
||||
}
|
||||
}
|
||||
|
||||
func WriteXml(data any, res http.ResponseWriter) {
|
||||
if xmlData, err := xml.MarshalIndent(data, "", " "); err != nil {
|
||||
res.WriteHeader(500)
|
||||
} else {
|
||||
res.Header().Add("size", fmt.Sprint(len(xmlData)))
|
||||
res.Write([]byte("<resultlist>"))
|
||||
res.Write(xmlData)
|
||||
res.Write([]byte("</resultlist>"))
|
||||
}
|
||||
}
|
||||
|
||||
var ObsUrl *string
|
||||
|
||||
func main() {
|
||||
@@ -277,7 +265,7 @@ func main() {
|
||||
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
|
||||
mime := ParseMimeHeader(req)
|
||||
obsPrj := req.PathValue("Project")
|
||||
common.LogInfo(" GET /status/"+obsPrj, "["+mime.MimeType()+"]")
|
||||
common.LogInfo(" GET /status/" + obsPrj, "[" + mime.MimeType() + "]")
|
||||
|
||||
status := FindAndUpdateProjectResults(obsPrj)
|
||||
if len(status) == 0 {
|
||||
@@ -291,21 +279,17 @@ func main() {
|
||||
res.Write(svg)
|
||||
} else if mime.IsJson() {
|
||||
WriteJson(status, res)
|
||||
} else if mime.IsXml() {
|
||||
WriteXml(status, res)
|
||||
}
|
||||
})
|
||||
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
|
||||
mime := ParseMimeHeader(req)
|
||||
obsPrj := req.PathValue("Project")
|
||||
obsPkg := req.PathValue("Package")
|
||||
common.LogInfo(" GET /status/"+obsPrj+"/"+obsPkg, "["+mime.MimeType()+"]")
|
||||
common.LogInfo(" GET /status/" + obsPrj + "/" + obsPkg, "[" + mime.MimeType() + "]")
|
||||
|
||||
status := slices.Clone(FindAndUpdateProjectResults(obsPrj))
|
||||
for i, s := range status {
|
||||
f := *s
|
||||
f.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
|
||||
status[i] = &f
|
||||
for _, s := range status {
|
||||
s.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
|
||||
}
|
||||
if len(status) == 0 {
|
||||
res.WriteHeader(404)
|
||||
@@ -320,8 +304,6 @@ func main() {
|
||||
res.Write(svg)
|
||||
} else if mime.IsJson() {
|
||||
WriteJson(status, res)
|
||||
} else if mime.IsXml() {
|
||||
WriteXml(status, res)
|
||||
}
|
||||
|
||||
})
|
||||
@@ -330,13 +312,11 @@ func main() {
|
||||
obsPrj := req.PathValue("Project")
|
||||
obsPkg := req.PathValue("Package")
|
||||
repo := req.PathValue("Repository")
|
||||
common.LogInfo(" GET /status/"+obsPrj+"/"+obsPkg, "["+mime.MimeType()+"]")
|
||||
common.LogInfo(" GET /status/" + obsPrj + "/" + obsPkg, "[" + mime.MimeType() + "]")
|
||||
|
||||
status := slices.Clone(FindAndUpdateRepoResults(obsPrj, repo))
|
||||
for i, s := range status {
|
||||
f := *s
|
||||
f.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
|
||||
status[i] = &f
|
||||
for _, s := range status {
|
||||
s.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
|
||||
}
|
||||
if len(status) == 0 {
|
||||
res.WriteHeader(404)
|
||||
@@ -350,8 +330,6 @@ func main() {
|
||||
res.Write(svg)
|
||||
} else if mime.IsJson() {
|
||||
WriteJson(status, res)
|
||||
} else if mime.IsXml() {
|
||||
WriteXml(status, res)
|
||||
}
|
||||
})
|
||||
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
|
||||
@@ -360,7 +338,7 @@ func main() {
|
||||
pkg := req.PathValue("Package")
|
||||
repo := req.PathValue("Repository")
|
||||
arch := req.PathValue("Arch")
|
||||
common.LogInfo(" GET /status/"+prj+"/"+pkg+"/"+repo+"/"+arch, "["+mime.MimeType()+"]")
|
||||
common.LogInfo(" GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch, "[" + mime.MimeType() + "]")
|
||||
|
||||
res.Header().Add("content-type", mime.MimeHeader)
|
||||
for _, r := range FindAndUpdateRepoResults(prj, repo) {
|
||||
@@ -371,8 +349,6 @@ func main() {
|
||||
res.Write(BuildStatusSvg(r, status))
|
||||
} else if mime.IsJson() {
|
||||
WriteJson(status, res)
|
||||
} else if mime.IsXml() {
|
||||
WriteXml(status, res)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"compress/bzip2"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
@@ -85,36 +82,3 @@ func TestStatusSvg(t *testing.T) {
|
||||
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
|
||||
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
|
||||
}
|
||||
|
||||
func TestFactoryResults(t *testing.T) {
|
||||
data, err := os.Open("factory.results.json.bz2")
|
||||
if err != nil {
|
||||
t.Fatal("Openning factory.results.json.bz2 failed:", err)
|
||||
}
|
||||
UncompressedData, err := io.ReadAll(bzip2.NewReader(data))
|
||||
if err != nil {
|
||||
t.Fatal("Reading factory.results.json.bz2 failed:", err)
|
||||
}
|
||||
|
||||
var results []*common.BuildResult
|
||||
if err := json.Unmarshal(UncompressedData, &results); err != nil {
|
||||
t.Fatal("Failed parsing test data", err)
|
||||
}
|
||||
|
||||
// add tests here
|
||||
tests := []struct {
|
||||
name string
|
||||
}{
|
||||
// add test data here
|
||||
{
|
||||
name: "First test",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
// and test code here
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,14 +11,12 @@ type MimeHeader struct {

const (
	JsonMime = "application/json"
	XmlMime  = "application/obs+xml"
	SvgMime  = "image/svg+xml"
)

var AcceptedStatusMimes []string = []string{
	SvgMime,
	JsonMime,
	XmlMime,
}

func ParseMimeHeader(req *http.Request) *MimeHeader {
@@ -43,10 +41,6 @@ func (m *MimeHeader) IsJson() bool {
	return m.MimeHeader == JsonMime
}

func (m *MimeHeader) IsXml() bool {
	return m.MimeHeader == XmlMime
}

func (m *MimeHeader) IsSvg() bool {
	return m.MimeHeader == SvgMime
}
@@ -54,8 +48,6 @@ func (m *MimeHeader) IsSvg() bool {
func (m *MimeHeader) MimeType() string {
	if m.IsJson() {
		return JsonMime
	} else if m.IsXml() {
		return XmlMime
	}

	return SvgMime // default

@@ -6,7 +6,6 @@ import (
	"html"
	"net/url"
	"slices"
	"strings"
)

type SvgWriter struct {
@@ -134,7 +133,7 @@ func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
}

func (svg *SvgWriter) WriteProjectStatus(project, repo, arch, status string, count int) {
	u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&" + url.QueryEscape(status) + "=1&arch_" + url.QueryEscape(arch) + "=1&repo_" + url.QueryEscape(strings.ReplaceAll(repo, ".", "_")) + "=1")
	u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&" + url.QueryEscape(status) + "=1&arch_" + url.QueryEscape(arch) + "=1&repo_" + url.QueryEscape(repo) + "=1")
	if err != nil {
		return
	}

@@ -1,15 +0,0 @@
[Unit]
Description=Group Review bot for %i
After=network-online.target

[Service]
Type=exec
ExecStart=/usr/bin/group-review %i
EnvironmentFile=-/etc/default/group-review/%i.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict

[Install]
WantedBy=multi-user.target

@@ -1,19 +0,0 @@
[Unit]
Description=WorkflowDirect git bot for %i
After=network-online.target

[Service]
Type=exec
ExecStart=/usr/bin/workflow-direct
EnvironmentFile=-/etc/default/%i/workflow-direct.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict
RuntimeDirectory=%i
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
WorkingDirectory=%t/%i

[Install]
WantedBy=multi-user.target

12  vendor/go.uber.org/mock/AUTHORS  generated  vendored
@@ -1,12 +0,0 @@
# This is the official list of GoMock authors for copyright purposes.
# This file is distinct from the CONTRIBUTORS files.
# See the latter for an explanation.

# Names should be added to this file as
#	Name or Organization <email address>
# The email address is not required for organizations.

# Please keep the list sorted.

Alex Reece <awreece@gmail.com>
Google Inc.
202  vendor/go.uber.org/mock/LICENSE  generated  vendored
@@ -1,202 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
506  vendor/go.uber.org/mock/gomock/call.go  generated  vendored
@@ -1,506 +0,0 @@
|
||||
// Copyright 2010 Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gomock
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Call represents an expected call to a mock.
|
||||
type Call struct {
|
||||
t TestHelper // for triggering test failures on invalid call setup
|
||||
|
||||
receiver any // the receiver of the method call
|
||||
method string // the name of the method
|
||||
methodType reflect.Type // the type of the method
|
||||
args []Matcher // the args
|
||||
origin string // file and line number of call setup
|
||||
|
||||
preReqs []*Call // prerequisite calls
|
||||
|
||||
// Expectations
|
||||
minCalls, maxCalls int
|
||||
|
||||
numCalls int // actual number made
|
||||
|
||||
// actions are called when this Call is called. Each action gets the args and
|
||||
// can set the return values by returning a non-nil slice. Actions run in the
|
||||
// order they are created.
|
||||
actions []func([]any) []any
|
||||
}
|
||||
|
||||
// newCall creates a *Call. It requires the method type in order to support
|
||||
// unexported methods.
|
||||
func newCall(t TestHelper, receiver any, method string, methodType reflect.Type, args ...any) *Call {
|
||||
t.Helper()
|
||||
|
||||
// TODO: check arity, types.
|
||||
mArgs := make([]Matcher, len(args))
|
||||
for i, arg := range args {
|
||||
if m, ok := arg.(Matcher); ok {
|
||||
mArgs[i] = m
|
||||
} else if arg == nil {
|
||||
// Handle nil specially so that passing a nil interface value
|
||||
// will match the typed nils of concrete args.
|
||||
mArgs[i] = Nil()
|
||||
} else {
|
||||
mArgs[i] = Eq(arg)
|
||||
}
|
||||
}
|
||||
|
||||
// callerInfo's skip should be updated if the number of calls between the user's test
|
||||
// and this line changes, i.e. this code is wrapped in another anonymous function.
|
||||
// 0 is us, 1 is RecordCallWithMethodType(), 2 is the generated recorder, and 3 is the user's test.
|
||||
origin := callerInfo(3)
|
||||
actions := []func([]any) []any{func([]any) []any {
|
||||
// Synthesize the zero value for each of the return args' types.
|
||||
rets := make([]any, methodType.NumOut())
|
||||
for i := 0; i < methodType.NumOut(); i++ {
|
||||
rets[i] = reflect.Zero(methodType.Out(i)).Interface()
|
||||
}
|
||||
return rets
|
||||
}}
|
||||
return &Call{
|
||||
t: t, receiver: receiver, method: method, methodType: methodType,
|
||||
args: mArgs, origin: origin, minCalls: 1, maxCalls: 1, actions: actions,
|
||||
}
|
||||
}
|
||||
|
||||
// AnyTimes allows the expectation to be called 0 or more times
|
||||
func (c *Call) AnyTimes() *Call {
|
||||
c.minCalls, c.maxCalls = 0, 1e8 // close enough to infinity
|
||||
return c
|
||||
}
|
||||
|
||||
// MinTimes requires the call to occur at least n times. If AnyTimes or MaxTimes have not been called or if MaxTimes
|
||||
// was previously called with 1, MinTimes also sets the maximum number of calls to infinity.
|
||||
func (c *Call) MinTimes(n int) *Call {
|
||||
c.minCalls = n
|
||||
if c.maxCalls == 1 {
|
||||
c.maxCalls = 1e8
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// MaxTimes limits the number of calls to n times. If AnyTimes or MinTimes have not been called or if MinTimes was
|
||||
// previously called with 1, MaxTimes also sets the minimum number of calls to 0.
|
||||
func (c *Call) MaxTimes(n int) *Call {
|
||||
c.maxCalls = n
|
||||
if c.minCalls == 1 {
|
||||
c.minCalls = 0
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn declares the action to run when the call is matched.
|
||||
// The return values from this function are returned by the mocked function.
|
||||
// It takes an any argument to support n-arity functions.
|
||||
// The anonymous function must match the function signature mocked method.
|
||||
func (c *Call) DoAndReturn(f any) *Call {
|
||||
// TODO: Check arity and types here, rather than dying badly elsewhere.
|
||||
v := reflect.ValueOf(f)
|
||||
|
||||
c.addAction(func(args []any) []any {
|
||||
c.t.Helper()
|
||||
ft := v.Type()
|
||||
if c.methodType.NumIn() != ft.NumIn() {
|
||||
if ft.IsVariadic() {
|
||||
c.t.Fatalf("wrong number of arguments in DoAndReturn func for %T.%v The function signature must match the mocked method, a variadic function cannot be used.",
|
||||
c.receiver, c.method)
|
||||
} else {
|
||||
c.t.Fatalf("wrong number of arguments in DoAndReturn func for %T.%v: got %d, want %d [%s]",
|
||||
c.receiver, c.method, ft.NumIn(), c.methodType.NumIn(), c.origin)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
vArgs := make([]reflect.Value, len(args))
|
||||
for i := 0; i < len(args); i++ {
|
||||
if args[i] != nil {
|
||||
vArgs[i] = reflect.ValueOf(args[i])
|
||||
} else {
|
||||
// Use the zero value for the arg.
|
||||
vArgs[i] = reflect.Zero(ft.In(i))
|
||||
}
|
||||
}
|
||||
vRets := v.Call(vArgs)
|
||||
rets := make([]any, len(vRets))
|
||||
for i, ret := range vRets {
|
||||
rets[i] = ret.Interface()
|
||||
}
|
||||
return rets
|
||||
})
|
||||
return c
|
||||
}
|
||||
|
||||
// Do declares the action to run when the call is matched. The function's
|
||||
// return values are ignored to retain backward compatibility. To use the
|
||||
// return values call DoAndReturn.
|
||||
// It takes an any argument to support n-arity functions.
|
||||
// The anonymous function must match the function signature mocked method.
|
||||
func (c *Call) Do(f any) *Call {
|
||||
// TODO: Check arity and types here, rather than dying badly elsewhere.
|
||||
v := reflect.ValueOf(f)
|
||||
|
||||
c.addAction(func(args []any) []any {
|
||||
c.t.Helper()
|
||||
ft := v.Type()
|
||||
if c.methodType.NumIn() != ft.NumIn() {
|
||||
if ft.IsVariadic() {
|
||||
c.t.Fatalf("wrong number of arguments in Do func for %T.%v The function signature must match the mocked method, a variadic function cannot be used.",
|
||||
c.receiver, c.method)
|
||||
} else {
|
||||
c.t.Fatalf("wrong number of arguments in Do func for %T.%v: got %d, want %d [%s]",
|
||||
c.receiver, c.method, ft.NumIn(), c.methodType.NumIn(), c.origin)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
vArgs := make([]reflect.Value, len(args))
|
||||
for i := 0; i < len(args); i++ {
|
||||
if args[i] != nil {
|
||||
vArgs[i] = reflect.ValueOf(args[i])
|
||||
} else {
|
||||
// Use the zero value for the arg.
|
||||
vArgs[i] = reflect.Zero(ft.In(i))
|
||||
}
|
||||
}
|
||||
v.Call(vArgs)
|
||||
return nil
|
||||
})
|
||||
return c
|
||||
}
|
||||
|
||||
// Return declares the values to be returned by the mocked function call.
|
||||
func (c *Call) Return(rets ...any) *Call {
|
||||
c.t.Helper()
|
||||
|
||||
mt := c.methodType
|
||||
if len(rets) != mt.NumOut() {
|
||||
c.t.Fatalf("wrong number of arguments to Return for %T.%v: got %d, want %d [%s]",
|
||||
c.receiver, c.method, len(rets), mt.NumOut(), c.origin)
|
||||
}
|
||||
for i, ret := range rets {
|
||||
if got, want := reflect.TypeOf(ret), mt.Out(i); got == want {
|
||||
// Identical types; nothing to do.
|
||||
} else if got == nil {
|
||||
// Nil needs special handling.
|
||||
switch want.Kind() {
|
||||
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
// ok
|
||||
default:
|
||||
c.t.Fatalf("argument %d to Return for %T.%v is nil, but %v is not nillable [%s]",
|
||||
i, c.receiver, c.method, want, c.origin)
|
||||
}
|
||||
} else if got.AssignableTo(want) {
|
||||
// Assignable type relation. Make the assignment now so that the generated code
|
||||
// can return the values with a type assertion.
|
||||
v := reflect.New(want).Elem()
|
||||
v.Set(reflect.ValueOf(ret))
|
||||
rets[i] = v.Interface()
|
||||
} else {
|
||||
c.t.Fatalf("wrong type of argument %d to Return for %T.%v: %v is not assignable to %v [%s]",
|
||||
i, c.receiver, c.method, got, want, c.origin)
|
||||
}
|
||||
}
|
||||
|
||||
c.addAction(func([]any) []any {
|
||||
return rets
|
||||
})
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// Times declares the exact number of times a function call is expected to be executed.
|
||||
func (c *Call) Times(n int) *Call {
|
||||
c.minCalls, c.maxCalls = n, n
|
||||
return c
|
||||
}
|
||||
|
||||
// SetArg declares an action that will set the nth argument's value,
|
||||
// indirected through a pointer. Or, in the case of a slice and map, SetArg
|
||||
// will copy value's elements/key-value pairs into the nth argument.
|
||||
func (c *Call) SetArg(n int, value any) *Call {
|
||||
c.t.Helper()
|
||||
|
||||
mt := c.methodType
|
||||
// TODO: This will break on variadic methods.
|
||||
// We will need to check those at invocation time.
|
||||
if n < 0 || n >= mt.NumIn() {
|
||||
c.t.Fatalf("SetArg(%d, ...) called for a method with %d args [%s]",
|
||||
n, mt.NumIn(), c.origin)
|
||||
}
|
||||
// Permit setting argument through an interface.
|
||||
// In the interface case, we don't (nay, can't) check the type here.
|
||||
at := mt.In(n)
|
||||
switch at.Kind() {
|
||||
case reflect.Ptr:
|
||||
dt := at.Elem()
|
||||
if vt := reflect.TypeOf(value); !vt.AssignableTo(dt) {
|
||||
c.t.Fatalf("SetArg(%d, ...) argument is a %v, not assignable to %v [%s]",
|
||||
n, vt, dt, c.origin)
|
||||
}
|
||||
case reflect.Interface, reflect.Slice, reflect.Map:
|
||||
// nothing to do
|
||||
default:
|
||||
c.t.Fatalf("SetArg(%d, ...) referring to argument of non-pointer non-interface non-slice non-map type %v [%s]",
|
||||
n, at, c.origin)
|
||||
}
|
||||
|
||||
c.addAction(func(args []any) []any {
|
||||
v := reflect.ValueOf(value)
|
||||
switch reflect.TypeOf(args[n]).Kind() {
|
||||
case reflect.Slice:
|
||||
setSlice(args[n], v)
|
||||
case reflect.Map:
|
||||
setMap(args[n], v)
|
||||
default:
|
||||
reflect.ValueOf(args[n]).Elem().Set(v)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return c
|
||||
}
|
||||
|
||||
// isPreReq returns true if other is a direct or indirect prerequisite to c.
|
||||
func (c *Call) isPreReq(other *Call) bool {
|
||||
for _, preReq := range c.preReqs {
|
||||
if other == preReq || preReq.isPreReq(other) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// After declares that the call may only match after preReq has been exhausted.
|
||||
func (c *Call) After(preReq *Call) *Call {
|
||||
c.t.Helper()
|
||||
|
||||
if c == preReq {
|
||||
c.t.Fatalf("A call isn't allowed to be its own prerequisite")
|
||||
}
|
||||
if preReq.isPreReq(c) {
|
||||
c.t.Fatalf("Loop in call order: %v is a prerequisite to %v (possibly indirectly).", c, preReq)
|
||||
}
|
||||
|
||||
c.preReqs = append(c.preReqs, preReq)
|
||||
return c
|
||||
}
|
||||
|
||||
// Returns true if the minimum number of calls have been made.
|
||||
func (c *Call) satisfied() bool {
|
||||
return c.numCalls >= c.minCalls
|
||||
}
|
||||
|
||||
// Returns true if the maximum number of calls have been made.
|
||||
func (c *Call) exhausted() bool {
|
||||
return c.numCalls >= c.maxCalls
|
||||
}
|
||||
|
||||
func (c *Call) String() string {
|
||||
args := make([]string, len(c.args))
|
||||
for i, arg := range c.args {
|
||||
args[i] = arg.String()
|
||||
}
|
||||
arguments := strings.Join(args, ", ")
|
||||
return fmt.Sprintf("%T.%v(%s) %s", c.receiver, c.method, arguments, c.origin)
|
||||
}
|
||||
|
||||
// Tests if the given call matches the expected call.
|
||||
// If yes, returns nil. If no, returns error with message explaining why it does not match.
|
||||
func (c *Call) matches(args []any) error {
|
||||
if !c.methodType.IsVariadic() {
|
||||
if len(args) != len(c.args) {
|
||||
return fmt.Errorf("expected call at %s has the wrong number of arguments. Got: %d, want: %d",
|
||||
c.origin, len(args), len(c.args))
|
||||
}
|
||||
|
||||
for i, m := range c.args {
|
||||
if !m.Matches(args[i]) {
|
||||
return fmt.Errorf(
|
||||
"expected call at %s doesn't match the argument at index %d.\nGot: %v\nWant: %v",
|
||||
c.origin, i, formatGottenArg(m, args[i]), m,
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if len(c.args) < c.methodType.NumIn()-1 {
|
||||
return fmt.Errorf("expected call at %s has the wrong number of matchers. Got: %d, want: %d",
|
||||
c.origin, len(c.args), c.methodType.NumIn()-1)
|
||||
}
|
||||
if len(c.args) != c.methodType.NumIn() && len(args) != len(c.args) {
|
||||
return fmt.Errorf("expected call at %s has the wrong number of arguments. Got: %d, want: %d",
|
||||
c.origin, len(args), len(c.args))
|
||||
}
|
||||
if len(args) < len(c.args)-1 {
|
||||
return fmt.Errorf("expected call at %s has the wrong number of arguments. Got: %d, want: greater than or equal to %d",
|
||||
c.origin, len(args), len(c.args)-1)
|
||||
}
|
||||
|
||||
for i, m := range c.args {
|
||||
if i < c.methodType.NumIn()-1 {
|
||||
// Non-variadic args
|
||||
if !m.Matches(args[i]) {
|
||||
return fmt.Errorf("expected call at %s doesn't match the argument at index %s.\nGot: %v\nWant: %v",
|
||||
c.origin, strconv.Itoa(i), formatGottenArg(m, args[i]), m)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// The last arg has a possibility of a variadic argument, so let it branch
|
||||
|
||||
// sample: Foo(a int, b int, c ...int)
|
||||
if i < len(c.args) && i < len(args) {
|
||||
if m.Matches(args[i]) {
|
||||
// Got Foo(a, b, c) want Foo(matcherA, matcherB, gomock.Any())
|
||||
// Got Foo(a, b, c) want Foo(matcherA, matcherB, someSliceMatcher)
|
||||
// Got Foo(a, b, c) want Foo(matcherA, matcherB, matcherC)
|
||||
// Got Foo(a, b) want Foo(matcherA, matcherB)
|
||||
// Got Foo(a, b, c, d) want Foo(matcherA, matcherB, matcherC, matcherD)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// The number of actual args don't match the number of matchers,
|
||||
// or the last matcher is a slice and the last arg is not.
|
||||
// If this function still matches it is because the last matcher
|
||||
// matches all the remaining arguments or the lack of any.
|
||||
// Convert the remaining arguments, if any, into a slice of the
|
||||
// expected type.
|
||||
vArgsType := c.methodType.In(c.methodType.NumIn() - 1)
|
||||
vArgs := reflect.MakeSlice(vArgsType, 0, len(args)-i)
|
||||
for _, arg := range args[i:] {
|
||||
vArgs = reflect.Append(vArgs, reflect.ValueOf(arg))
|
||||
}
|
||||
if m.Matches(vArgs.Interface()) {
|
||||
// Got Foo(a, b, c, d, e) want Foo(matcherA, matcherB, gomock.Any())
|
||||
// Got Foo(a, b, c, d, e) want Foo(matcherA, matcherB, someSliceMatcher)
|
||||
// Got Foo(a, b) want Foo(matcherA, matcherB, gomock.Any())
|
||||
// Got Foo(a, b) want Foo(matcherA, matcherB, someEmptySliceMatcher)
|
||||
break
|
||||
}
|
||||
// Wrong number of matchers or not match. Fail.
|
||||
// Got Foo(a, b) want Foo(matcherA, matcherB, matcherC, matcherD)
|
||||
// Got Foo(a, b, c) want Foo(matcherA, matcherB, matcherC, matcherD)
|
||||
// Got Foo(a, b, c, d) want Foo(matcherA, matcherB, matcherC, matcherD, matcherE)
|
||||
// Got Foo(a, b, c, d, e) want Foo(matcherA, matcherB, matcherC, matcherD)
|
||||
// Got Foo(a, b, c) want Foo(matcherA, matcherB)
|
||||
|
||||
return fmt.Errorf("expected call at %s doesn't match the argument at index %s.\nGot: %v\nWant: %v",
|
||||
c.origin, strconv.Itoa(i), formatGottenArg(m, args[i:]), c.args[i])
|
||||
}
|
||||
}
|
||||
|
||||
// Check that all prerequisite calls have been satisfied.
|
||||
for _, preReqCall := range c.preReqs {
|
||||
if !preReqCall.satisfied() {
|
||||
return fmt.Errorf("expected call at %s doesn't have a prerequisite call satisfied:\n%v\nshould be called before:\n%v",
|
||||
c.origin, preReqCall, c)
|
||||
}
|
||||
}
|
||||
|
||||
// Check that the call is not exhausted.
|
||||
if c.exhausted() {
|
||||
return fmt.Errorf("expected call at %s has already been called the max number of times", c.origin)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// dropPrereqs tells the expected Call to not re-check prerequisite calls any
|
||||
// longer, and to return its current set.
|
||||
func (c *Call) dropPrereqs() (preReqs []*Call) {
|
||||
preReqs = c.preReqs
|
||||
c.preReqs = nil
|
||||
return
|
||||
}
|
||||
|
||||
func (c *Call) call() []func([]any) []any {
|
||||
c.numCalls++
|
||||
return c.actions
|
||||
}
|
||||
|
||||
// InOrder declares that the given calls should occur in order.
|
||||
// It panics if the type of any of the arguments isn't *Call or a generated
|
||||
// mock with an embedded *Call.
|
||||
func InOrder(args ...any) {
|
||||
calls := make([]*Call, 0, len(args))
|
||||
for i := 0; i < len(args); i++ {
|
||||
if call := getCall(args[i]); call != nil {
|
||||
calls = append(calls, call)
|
||||
continue
|
||||
}
|
||||
panic(fmt.Sprintf(
|
||||
"invalid argument at position %d of type %T, InOrder expects *gomock.Call or generated mock types with an embedded *gomock.Call",
|
||||
i,
|
||||
args[i],
|
||||
))
|
||||
}
|
||||
for i := 1; i < len(calls); i++ {
|
||||
calls[i].After(calls[i-1])
|
||||
}
|
||||
}
|
||||
|
||||
// getCall checks if the parameter is a *Call or a generated struct
|
||||
// that wraps a *Call and returns the *Call pointer - if neither, it returns nil.
|
||||
func getCall(arg any) *Call {
|
||||
if call, ok := arg.(*Call); ok {
|
||||
return call
|
||||
}
|
||||
t := reflect.ValueOf(arg)
|
||||
if t.Kind() != reflect.Ptr && t.Kind() != reflect.Interface {
|
||||
return nil
|
||||
}
|
||||
t = t.Elem()
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
if !f.CanInterface() {
|
||||
continue
|
||||
}
|
||||
if call, ok := f.Interface().(*Call); ok {
|
||||
return call
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSlice(arg any, v reflect.Value) {
|
||||
va := reflect.ValueOf(arg)
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
va.Index(i).Set(v.Index(i))
|
||||
}
|
||||
}
|
||||
|
||||
func setMap(arg any, v reflect.Value) {
|
||||
va := reflect.ValueOf(arg)
|
||||
for _, e := range va.MapKeys() {
|
||||
va.SetMapIndex(e, reflect.Value{})
|
||||
}
|
||||
for _, e := range v.MapKeys() {
|
||||
va.SetMapIndex(e, v.MapIndex(e))
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Call) addAction(action func([]any) []any) {
|
||||
c.actions = append(c.actions, action)
|
||||
}
|
||||
|
||||
func formatGottenArg(m Matcher, arg any) string {
|
||||
got := fmt.Sprintf("%v (%T)", arg, arg)
|
||||
if gs, ok := m.(GotFormatter); ok {
|
||||
got = gs.Got(arg)
|
||||
}
|
||||
return got
|
||||
}
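For orientation, the call.go listing above is the expectation API that this deleted vendor copy provided. The following is only a rough sketch of how a test exercises Return, Do, SetArg and After; the Store interface and the mockgen-generated MockStore/NewMockStore are hypothetical names that do not exist in this repository.

package store_test

import (
	"errors"
	"testing"

	"go.uber.org/mock/gomock"
)

// Store is an assumed, illustrative interface; MockStore/NewMockStore would
// normally be produced by running mockgen against it.
type Store interface {
	Get(key string) (string, error)
	Fill(dst *[]string)
}

func TestExpectedCalls(t *testing.T) {
	ctrl := gomock.NewController(t)
	m := NewMockStore(ctrl) // hypothetical mockgen output

	// Return fixes the values handed back when the call matches.
	first := m.EXPECT().Get("a").Return("alpha", nil)

	// Do runs a side effect; its own return values are ignored (DoAndReturn
	// would use them). After orders this call behind the one above.
	m.EXPECT().Get("b").
		Do(func(key string) { t.Logf("looked up %q", key) }).
		Return("", errors.New("missing")).
		After(first)

	// SetArg writes through the pointer argument when the call happens.
	m.EXPECT().Fill(gomock.Any()).SetArg(0, []string{"x", "y"})

	_, _ = m.Get("a")
	_, _ = m.Get("b")
	var out []string
	m.Fill(&out) // out is now []string{"x", "y"}
}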
164 vendor/go.uber.org/mock/gomock/callset.go generated vendored
@@ -1,164 +0,0 @@
|
||||
// Copyright 2011 Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gomock
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// callSet represents a set of expected calls, indexed by receiver and method
|
||||
// name.
|
||||
type callSet struct {
|
||||
// Calls that are still expected.
|
||||
expected map[callSetKey][]*Call
|
||||
expectedMu *sync.Mutex
|
||||
// Calls that have been exhausted.
|
||||
exhausted map[callSetKey][]*Call
|
||||
// when set to true, existing call expectations are overridden when new call expectations are made
|
||||
allowOverride bool
|
||||
}
|
||||
|
||||
// callSetKey is the key in the maps in callSet
|
||||
type callSetKey struct {
|
||||
receiver any
|
||||
fname string
|
||||
}
|
||||
|
||||
func newCallSet() *callSet {
|
||||
return &callSet{
|
||||
expected: make(map[callSetKey][]*Call),
|
||||
expectedMu: &sync.Mutex{},
|
||||
exhausted: make(map[callSetKey][]*Call),
|
||||
}
|
||||
}
|
||||
|
||||
func newOverridableCallSet() *callSet {
|
||||
return &callSet{
|
||||
expected: make(map[callSetKey][]*Call),
|
||||
expectedMu: &sync.Mutex{},
|
||||
exhausted: make(map[callSetKey][]*Call),
|
||||
allowOverride: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Add adds a new expected call.
|
||||
func (cs callSet) Add(call *Call) {
|
||||
key := callSetKey{call.receiver, call.method}
|
||||
|
||||
cs.expectedMu.Lock()
|
||||
defer cs.expectedMu.Unlock()
|
||||
|
||||
m := cs.expected
|
||||
if call.exhausted() {
|
||||
m = cs.exhausted
|
||||
}
|
||||
if cs.allowOverride {
|
||||
m[key] = make([]*Call, 0)
|
||||
}
|
||||
|
||||
m[key] = append(m[key], call)
|
||||
}
|
||||
|
||||
// Remove removes an expected call.
|
||||
func (cs callSet) Remove(call *Call) {
|
||||
key := callSetKey{call.receiver, call.method}
|
||||
|
||||
cs.expectedMu.Lock()
|
||||
defer cs.expectedMu.Unlock()
|
||||
|
||||
calls := cs.expected[key]
|
||||
for i, c := range calls {
|
||||
if c == call {
|
||||
// maintain order for remaining calls
|
||||
cs.expected[key] = append(calls[:i], calls[i+1:]...)
|
||||
cs.exhausted[key] = append(cs.exhausted[key], call)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FindMatch searches for a matching call. Returns error with explanation message if no call matched.
|
||||
func (cs callSet) FindMatch(receiver any, method string, args []any) (*Call, error) {
|
||||
key := callSetKey{receiver, method}
|
||||
|
||||
cs.expectedMu.Lock()
|
||||
defer cs.expectedMu.Unlock()
|
||||
|
||||
// Search through the expected calls.
|
||||
expected := cs.expected[key]
|
||||
var callsErrors bytes.Buffer
|
||||
for _, call := range expected {
|
||||
err := call.matches(args)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(&callsErrors, "\n%v", err)
|
||||
} else {
|
||||
return call, nil
|
||||
}
|
||||
}
|
||||
|
||||
// If we haven't found a match then search through the exhausted calls so we
|
||||
// get useful error messages.
|
||||
exhausted := cs.exhausted[key]
|
||||
for _, call := range exhausted {
|
||||
if err := call.matches(args); err != nil {
|
||||
_, _ = fmt.Fprintf(&callsErrors, "\n%v", err)
|
||||
continue
|
||||
}
|
||||
_, _ = fmt.Fprintf(
|
||||
&callsErrors, "all expected calls for method %q have been exhausted", method,
|
||||
)
|
||||
}
|
||||
|
||||
if len(expected)+len(exhausted) == 0 {
|
||||
_, _ = fmt.Fprintf(&callsErrors, "there are no expected calls of the method %q for that receiver", method)
|
||||
}
|
||||
|
||||
return nil, errors.New(callsErrors.String())
|
||||
}
|
||||
|
||||
// Failures returns the calls that are not satisfied.
|
||||
func (cs callSet) Failures() []*Call {
|
||||
cs.expectedMu.Lock()
|
||||
defer cs.expectedMu.Unlock()
|
||||
|
||||
failures := make([]*Call, 0, len(cs.expected))
|
||||
for _, calls := range cs.expected {
|
||||
for _, call := range calls {
|
||||
if !call.satisfied() {
|
||||
failures = append(failures, call)
|
||||
}
|
||||
}
|
||||
}
|
||||
return failures
|
||||
}
|
||||
|
||||
// Satisfied returns true in case all expected calls in this callSet are satisfied.
|
||||
func (cs callSet) Satisfied() bool {
|
||||
cs.expectedMu.Lock()
|
||||
defer cs.expectedMu.Unlock()
|
||||
|
||||
for _, calls := range cs.expected {
|
||||
for _, call := range calls {
|
||||
if !call.satisfied() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
326 vendor/go.uber.org/mock/gomock/controller.go generated vendored
@@ -1,326 +0,0 @@
|
||||
// Copyright 2010 Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gomock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A TestReporter is something that can be used to report test failures. It
|
||||
// is satisfied by the standard library's *testing.T.
|
||||
type TestReporter interface {
|
||||
Errorf(format string, args ...any)
|
||||
Fatalf(format string, args ...any)
|
||||
}
|
||||
|
||||
// TestHelper is a TestReporter that has the Helper method. It is satisfied
|
||||
// by the standard library's *testing.T.
|
||||
type TestHelper interface {
|
||||
TestReporter
|
||||
Helper()
|
||||
}
|
||||
|
||||
// cleanuper is used to check if TestHelper also has the `Cleanup` method. A
|
||||
// common pattern is to pass in a `*testing.T` to
|
||||
// `NewController(t TestReporter)`. In Go 1.14+, `*testing.T` has a cleanup
|
||||
// method. This can be utilized to call `Finish()` so the caller of this library
|
||||
// does not have to.
|
||||
type cleanuper interface {
|
||||
Cleanup(func())
|
||||
}
|
||||
|
||||
// A Controller represents the top-level control of a mock ecosystem. It
|
||||
// defines the scope and lifetime of mock objects, as well as their
|
||||
// expectations. It is safe to call Controller's methods from multiple
|
||||
// goroutines. Each test should create a new Controller.
|
||||
//
|
||||
// func TestFoo(t *testing.T) {
|
||||
// ctrl := gomock.NewController(t)
|
||||
// // ..
|
||||
// }
|
||||
//
|
||||
// func TestBar(t *testing.T) {
|
||||
// t.Run("Sub-Test-1", st) {
|
||||
// ctrl := gomock.NewController(st)
|
||||
// // ..
|
||||
// })
|
||||
// t.Run("Sub-Test-2", st) {
|
||||
// ctrl := gomock.NewController(st)
|
||||
// // ..
|
||||
// })
|
||||
// }
|
||||
type Controller struct {
|
||||
// T should only be called within a generated mock. It is not intended to
|
||||
// be used in user code and may be changed in future versions. T is the
|
||||
// TestReporter passed in when creating the Controller via NewController.
|
||||
// If the TestReporter does not implement a TestHelper it will be wrapped
|
||||
// with a nopTestHelper.
|
||||
T TestHelper
|
||||
mu sync.Mutex
|
||||
expectedCalls *callSet
|
||||
finished bool
|
||||
}
|
||||
|
||||
// NewController returns a new Controller. It is the preferred way to create a Controller.
|
||||
//
|
||||
// Passing [*testing.T] registers cleanup function to automatically call [Controller.Finish]
|
||||
// when the test and all its subtests complete.
|
||||
func NewController(t TestReporter, opts ...ControllerOption) *Controller {
|
||||
h, ok := t.(TestHelper)
|
||||
if !ok {
|
||||
h = &nopTestHelper{t}
|
||||
}
|
||||
ctrl := &Controller{
|
||||
T: h,
|
||||
expectedCalls: newCallSet(),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt.apply(ctrl)
|
||||
}
|
||||
if c, ok := isCleanuper(ctrl.T); ok {
|
||||
c.Cleanup(func() {
|
||||
ctrl.T.Helper()
|
||||
ctrl.finish(true, nil)
|
||||
})
|
||||
}
|
||||
|
||||
return ctrl
|
||||
}
|
||||
|
||||
// ControllerOption configures how a Controller should behave.
|
||||
type ControllerOption interface {
|
||||
apply(*Controller)
|
||||
}
|
||||
|
||||
type overridableExpectationsOption struct{}
|
||||
|
||||
// WithOverridableExpectations allows for overridable call expectations
|
||||
// i.e., subsequent call expectations override existing call expectations
|
||||
func WithOverridableExpectations() overridableExpectationsOption {
|
||||
return overridableExpectationsOption{}
|
||||
}
|
||||
|
||||
func (o overridableExpectationsOption) apply(ctrl *Controller) {
|
||||
ctrl.expectedCalls = newOverridableCallSet()
|
||||
}
|
||||
|
||||
type cancelReporter struct {
|
||||
t TestHelper
|
||||
cancel func()
|
||||
}
|
||||
|
||||
func (r *cancelReporter) Errorf(format string, args ...any) {
|
||||
r.t.Errorf(format, args...)
|
||||
}
|
||||
|
||||
func (r *cancelReporter) Fatalf(format string, args ...any) {
|
||||
defer r.cancel()
|
||||
r.t.Fatalf(format, args...)
|
||||
}
|
||||
|
||||
func (r *cancelReporter) Helper() {
|
||||
r.t.Helper()
|
||||
}
|
||||
|
||||
// WithContext returns a new Controller and a Context, which is cancelled on any
|
||||
// fatal failure.
|
||||
func WithContext(ctx context.Context, t TestReporter) (*Controller, context.Context) {
|
||||
h, ok := t.(TestHelper)
|
||||
if !ok {
|
||||
h = &nopTestHelper{t: t}
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
return NewController(&cancelReporter{t: h, cancel: cancel}), ctx
|
||||
}
|
||||
|
||||
type nopTestHelper struct {
|
||||
t TestReporter
|
||||
}
|
||||
|
||||
func (h *nopTestHelper) Errorf(format string, args ...any) {
|
||||
h.t.Errorf(format, args...)
|
||||
}
|
||||
|
||||
func (h *nopTestHelper) Fatalf(format string, args ...any) {
|
||||
h.t.Fatalf(format, args...)
|
||||
}
|
||||
|
||||
func (h nopTestHelper) Helper() {}
|
||||
|
||||
// RecordCall is called by a mock. It should not be called by user code.
|
||||
func (ctrl *Controller) RecordCall(receiver any, method string, args ...any) *Call {
|
||||
ctrl.T.Helper()
|
||||
|
||||
recv := reflect.ValueOf(receiver)
|
||||
for i := 0; i < recv.Type().NumMethod(); i++ {
|
||||
if recv.Type().Method(i).Name == method {
|
||||
return ctrl.RecordCallWithMethodType(receiver, method, recv.Method(i).Type(), args...)
|
||||
}
|
||||
}
|
||||
ctrl.T.Fatalf("gomock: failed finding method %s on %T", method, receiver)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// RecordCallWithMethodType is called by a mock. It should not be called by user code.
|
||||
func (ctrl *Controller) RecordCallWithMethodType(receiver any, method string, methodType reflect.Type, args ...any) *Call {
|
||||
ctrl.T.Helper()
|
||||
|
||||
call := newCall(ctrl.T, receiver, method, methodType, args...)
|
||||
|
||||
ctrl.mu.Lock()
|
||||
defer ctrl.mu.Unlock()
|
||||
ctrl.expectedCalls.Add(call)
|
||||
|
||||
return call
|
||||
}
|
||||
|
||||
// Call is called by a mock. It should not be called by user code.
|
||||
func (ctrl *Controller) Call(receiver any, method string, args ...any) []any {
|
||||
ctrl.T.Helper()
|
||||
|
||||
// Nest this code so we can use defer to make sure the lock is released.
|
||||
actions := func() []func([]any) []any {
|
||||
ctrl.T.Helper()
|
||||
ctrl.mu.Lock()
|
||||
defer ctrl.mu.Unlock()
|
||||
|
||||
expected, err := ctrl.expectedCalls.FindMatch(receiver, method, args)
|
||||
if err != nil {
|
||||
// callerInfo's skip should be updated if the number of calls between the user's test
|
||||
// and this line changes, i.e. this code is wrapped in another anonymous function.
|
||||
// 0 is us, 1 is controller.Call(), 2 is the generated mock, and 3 is the user's test.
|
||||
origin := callerInfo(3)
|
||||
stringArgs := make([]string, len(args))
|
||||
for i, arg := range args {
|
||||
stringArgs[i] = getString(arg)
|
||||
}
|
||||
ctrl.T.Fatalf("Unexpected call to %T.%v(%v) at %s because: %s", receiver, method, stringArgs, origin, err)
|
||||
}
|
||||
|
||||
// Two things happen here:
|
||||
// * the matching call no longer needs to check prerequisite calls,
|
||||
// * and the prerequisite calls are no longer expected, so remove them.
|
||||
preReqCalls := expected.dropPrereqs()
|
||||
for _, preReqCall := range preReqCalls {
|
||||
ctrl.expectedCalls.Remove(preReqCall)
|
||||
}
|
||||
|
||||
actions := expected.call()
|
||||
if expected.exhausted() {
|
||||
ctrl.expectedCalls.Remove(expected)
|
||||
}
|
||||
return actions
|
||||
}()
|
||||
|
||||
var rets []any
|
||||
for _, action := range actions {
|
||||
if r := action(args); r != nil {
|
||||
rets = r
|
||||
}
|
||||
}
|
||||
|
||||
return rets
|
||||
}
|
||||
|
||||
// Finish checks to see if all the methods that were expected to be called were called.
|
||||
// It is not idempotent and therefore can only be invoked once.
|
||||
//
|
||||
// Note: If you pass a *testing.T into [NewController], you no longer
|
||||
// need to call ctrl.Finish() in your test methods.
|
||||
func (ctrl *Controller) Finish() {
|
||||
// If we're currently panicking, probably because this is a deferred call.
|
||||
// This must be recovered in the deferred function.
|
||||
err := recover()
|
||||
ctrl.finish(false, err)
|
||||
}
|
||||
|
||||
// Satisfied returns whether all expected calls bound to this Controller have been satisfied.
|
||||
// Calling Finish is then guaranteed to not fail due to missing calls.
|
||||
func (ctrl *Controller) Satisfied() bool {
|
||||
ctrl.mu.Lock()
|
||||
defer ctrl.mu.Unlock()
|
||||
return ctrl.expectedCalls.Satisfied()
|
||||
}
|
||||
|
||||
func (ctrl *Controller) finish(cleanup bool, panicErr any) {
|
||||
ctrl.T.Helper()
|
||||
|
||||
ctrl.mu.Lock()
|
||||
defer ctrl.mu.Unlock()
|
||||
|
||||
if ctrl.finished {
|
||||
if _, ok := isCleanuper(ctrl.T); !ok {
|
||||
ctrl.T.Fatalf("Controller.Finish was called more than once. It has to be called exactly once.")
|
||||
}
|
||||
return
|
||||
}
|
||||
ctrl.finished = true
|
||||
|
||||
// Short-circuit, pass through the panic.
|
||||
if panicErr != nil {
|
||||
panic(panicErr)
|
||||
}
|
||||
|
||||
// Check that all remaining expected calls are satisfied.
|
||||
failures := ctrl.expectedCalls.Failures()
|
||||
for _, call := range failures {
|
||||
ctrl.T.Errorf("missing call(s) to %v", call)
|
||||
}
|
||||
if len(failures) != 0 {
|
||||
if !cleanup {
|
||||
ctrl.T.Fatalf("aborting test due to missing call(s)")
|
||||
return
|
||||
}
|
||||
ctrl.T.Errorf("aborting test due to missing call(s)")
|
||||
}
|
||||
}
|
||||
|
||||
// callerInfo returns the file:line of the call site. skip is the number
|
||||
// of stack frames to skip when reporting. 0 is callerInfo's call site.
|
||||
func callerInfo(skip int) string {
|
||||
if _, file, line, ok := runtime.Caller(skip + 1); ok {
|
||||
return fmt.Sprintf("%s:%d", file, line)
|
||||
}
|
||||
return "unknown file"
|
||||
}
|
||||
|
||||
// isCleanuper checks if t's base TestReporter has a Cleanup method.
|
||||
func isCleanuper(t TestReporter) (cleanuper, bool) {
|
||||
tr := unwrapTestReporter(t)
|
||||
c, ok := tr.(cleanuper)
|
||||
return c, ok
|
||||
}
|
||||
|
||||
// unwrapTestReporter unwraps TestReporter to the base implementation.
|
||||
func unwrapTestReporter(t TestReporter) TestReporter {
|
||||
tr := t
|
||||
switch nt := t.(type) {
|
||||
case *cancelReporter:
|
||||
tr = nt.t
|
||||
if h, check := tr.(*nopTestHelper); check {
|
||||
tr = h.t
|
||||
}
|
||||
case *nopTestHelper:
|
||||
tr = nt.t
|
||||
default:
|
||||
// not wrapped
|
||||
}
|
||||
return tr
|
||||
}
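controller.go above owns the lifecycle of those expectations. A hedged sketch of that lifecycle, continuing the hypothetical MockStore example from earlier: passing a *testing.T registers Finish through t.Cleanup, Satisfied reports whether anything is still pending, and the optional WithOverridableExpectations lets a later expectation replace an earlier one for the same method.

func TestControllerLifecycle(t *testing.T) {
	// With WithOverridableExpectations, the second Get("k") expectation
	// below replaces the first instead of queueing behind it.
	ctrl := gomock.NewController(t, gomock.WithOverridableExpectations())
	m := NewMockStore(ctrl) // hypothetical mockgen output, as before

	m.EXPECT().Get("k").Return("old", nil)
	m.EXPECT().Get("k").Return("new", nil) // overrides the line above

	if ctrl.Satisfied() {
		t.Fatal("one Get call is still expected at this point")
	}
	_, _ = m.Get("k") // returns "new", nil

	// No explicit ctrl.Finish(): NewController(t) registered it with t.Cleanup.
}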
60 vendor/go.uber.org/mock/gomock/doc.go generated vendored
@@ -1,60 +0,0 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package gomock is a mock framework for Go.
//
// Standard usage:
//
//	(1) Define an interface that you wish to mock.
//	      type MyInterface interface {
//	        SomeMethod(x int64, y string)
//	      }
//	(2) Use mockgen to generate a mock from the interface.
//	(3) Use the mock in a test:
//	      func TestMyThing(t *testing.T) {
//	        mockCtrl := gomock.NewController(t)
//	        mockObj := something.NewMockMyInterface(mockCtrl)
//	        mockObj.EXPECT().SomeMethod(4, "blah")
//	        // pass mockObj to a real object and play with it.
//	      }
//
// By default, expected calls are not enforced to run in any particular order.
// Call order dependency can be enforced by use of InOrder and/or Call.After.
// Call.After can create more varied call order dependencies, but InOrder is
// often more convenient.
//
// The following examples create equivalent call order dependencies.
//
// Example of using Call.After to chain expected call order:
//
//	firstCall := mockObj.EXPECT().SomeMethod(1, "first")
//	secondCall := mockObj.EXPECT().SomeMethod(2, "second").After(firstCall)
//	mockObj.EXPECT().SomeMethod(3, "third").After(secondCall)
//
// Example of using InOrder to declare expected call order:
//
//	gomock.InOrder(
//	    mockObj.EXPECT().SomeMethod(1, "first"),
//	    mockObj.EXPECT().SomeMethod(2, "second"),
//	    mockObj.EXPECT().SomeMethod(3, "third"),
//	)
//
// The standard TestReporter most users will pass to `NewController` is a
// `*testing.T` from the context of the test. Note that this will use the
// standard `t.Error` and `t.Fatal` methods to report what happened in the test.
// In some cases this can leave your testing package in a weird state if global
// state is used since `t.Fatal` is like calling panic in the middle of a
// function. In these cases it is recommended that you pass in your own
// `TestReporter`.
package gomock
447 vendor/go.uber.org/mock/gomock/matchers.go generated vendored
@@ -1,447 +0,0 @@
|
||||
// Copyright 2010 Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gomock
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Matcher is a representation of a class of values.
|
||||
// It is used to represent the valid or expected arguments to a mocked method.
|
||||
type Matcher interface {
|
||||
// Matches returns whether x is a match.
|
||||
Matches(x any) bool
|
||||
|
||||
// String describes what the matcher matches.
|
||||
String() string
|
||||
}
|
||||
|
||||
// WantFormatter modifies the given Matcher's String() method to the given
|
||||
// Stringer. This allows for control on how the "Want" is formatted when
|
||||
// printing.
|
||||
func WantFormatter(s fmt.Stringer, m Matcher) Matcher {
|
||||
type matcher interface {
|
||||
Matches(x any) bool
|
||||
}
|
||||
|
||||
return struct {
|
||||
matcher
|
||||
fmt.Stringer
|
||||
}{
|
||||
matcher: m,
|
||||
Stringer: s,
|
||||
}
|
||||
}
|
||||
|
||||
// StringerFunc type is an adapter to allow the use of ordinary functions as
|
||||
// a Stringer. If f is a function with the appropriate signature,
|
||||
// StringerFunc(f) is a Stringer that calls f.
|
||||
type StringerFunc func() string
|
||||
|
||||
// String implements fmt.Stringer.
|
||||
func (f StringerFunc) String() string {
|
||||
return f()
|
||||
}
|
||||
|
||||
// GotFormatter is used to better print failure messages. If a matcher
|
||||
// implements GotFormatter, it will use the result from Got when printing
|
||||
// the failure message.
|
||||
type GotFormatter interface {
|
||||
// Got is invoked with the received value. The result is used when
|
||||
// printing the failure message.
|
||||
Got(got any) string
|
||||
}
|
||||
|
||||
// GotFormatterFunc type is an adapter to allow the use of ordinary
|
||||
// functions as a GotFormatter. If f is a function with the appropriate
|
||||
// signature, GotFormatterFunc(f) is a GotFormatter that calls f.
|
||||
type GotFormatterFunc func(got any) string
|
||||
|
||||
// Got implements GotFormatter.
|
||||
func (f GotFormatterFunc) Got(got any) string {
|
||||
return f(got)
|
||||
}
|
||||
|
||||
// GotFormatterAdapter attaches a GotFormatter to a Matcher.
|
||||
func GotFormatterAdapter(s GotFormatter, m Matcher) Matcher {
|
||||
return struct {
|
||||
GotFormatter
|
||||
Matcher
|
||||
}{
|
||||
GotFormatter: s,
|
||||
Matcher: m,
|
||||
}
|
||||
}
|
||||
|
||||
type anyMatcher struct{}
|
||||
|
||||
func (anyMatcher) Matches(any) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (anyMatcher) String() string {
|
||||
return "is anything"
|
||||
}
|
||||
|
||||
type condMatcher[T any] struct {
|
||||
fn func(x T) bool
|
||||
}
|
||||
|
||||
func (c condMatcher[T]) Matches(x any) bool {
|
||||
typed, ok := x.(T)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return c.fn(typed)
|
||||
}
|
||||
|
||||
func (c condMatcher[T]) String() string {
|
||||
return "adheres to a custom condition"
|
||||
}
|
||||
|
||||
type eqMatcher struct {
|
||||
x any
|
||||
}
|
||||
|
||||
func (e eqMatcher) Matches(x any) bool {
|
||||
// In case some value is nil
|
||||
if e.x == nil || x == nil {
|
||||
return reflect.DeepEqual(e.x, x)
|
||||
}
|
||||
|
||||
// Check if types are assignable and convert them to a common type
|
||||
x1Val := reflect.ValueOf(e.x)
|
||||
x2Val := reflect.ValueOf(x)
|
||||
|
||||
if x1Val.Type().AssignableTo(x2Val.Type()) {
|
||||
x1ValConverted := x1Val.Convert(x2Val.Type())
|
||||
return reflect.DeepEqual(x1ValConverted.Interface(), x2Val.Interface())
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (e eqMatcher) String() string {
|
||||
return fmt.Sprintf("is equal to %s (%T)", getString(e.x), e.x)
|
||||
}
|
||||
|
||||
type nilMatcher struct{}
|
||||
|
||||
func (nilMatcher) Matches(x any) bool {
|
||||
if x == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
v := reflect.ValueOf(x)
|
||||
switch v.Kind() {
|
||||
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map,
|
||||
reflect.Ptr, reflect.Slice:
|
||||
return v.IsNil()
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (nilMatcher) String() string {
|
||||
return "is nil"
|
||||
}
|
||||
|
||||
type notMatcher struct {
|
||||
m Matcher
|
||||
}
|
||||
|
||||
func (n notMatcher) Matches(x any) bool {
|
||||
return !n.m.Matches(x)
|
||||
}
|
||||
|
||||
func (n notMatcher) String() string {
|
||||
return "not(" + n.m.String() + ")"
|
||||
}
|
||||
|
||||
type regexMatcher struct {
|
||||
regex *regexp.Regexp
|
||||
}
|
||||
|
||||
func (m regexMatcher) Matches(x any) bool {
|
||||
switch t := x.(type) {
|
||||
case string:
|
||||
return m.regex.MatchString(t)
|
||||
case []byte:
|
||||
return m.regex.Match(t)
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (m regexMatcher) String() string {
|
||||
return "matches regex " + m.regex.String()
|
||||
}
|
||||
|
||||
type assignableToTypeOfMatcher struct {
|
||||
targetType reflect.Type
|
||||
}
|
||||
|
||||
func (m assignableToTypeOfMatcher) Matches(x any) bool {
|
||||
return reflect.TypeOf(x).AssignableTo(m.targetType)
|
||||
}
|
||||
|
||||
func (m assignableToTypeOfMatcher) String() string {
|
||||
return "is assignable to " + m.targetType.Name()
|
||||
}
|
||||
|
||||
type anyOfMatcher struct {
|
||||
matchers []Matcher
|
||||
}
|
||||
|
||||
func (am anyOfMatcher) Matches(x any) bool {
|
||||
for _, m := range am.matchers {
|
||||
if m.Matches(x) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (am anyOfMatcher) String() string {
|
||||
ss := make([]string, 0, len(am.matchers))
|
||||
for _, matcher := range am.matchers {
|
||||
ss = append(ss, matcher.String())
|
||||
}
|
||||
return strings.Join(ss, " | ")
|
||||
}
|
||||
|
||||
type allMatcher struct {
|
||||
matchers []Matcher
|
||||
}
|
||||
|
||||
func (am allMatcher) Matches(x any) bool {
|
||||
for _, m := range am.matchers {
|
||||
if !m.Matches(x) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (am allMatcher) String() string {
|
||||
ss := make([]string, 0, len(am.matchers))
|
||||
for _, matcher := range am.matchers {
|
||||
ss = append(ss, matcher.String())
|
||||
}
|
||||
return strings.Join(ss, "; ")
|
||||
}
|
||||
|
||||
type lenMatcher struct {
|
||||
i int
|
||||
}
|
||||
|
||||
func (m lenMatcher) Matches(x any) bool {
|
||||
v := reflect.ValueOf(x)
|
||||
switch v.Kind() {
|
||||
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
|
||||
return v.Len() == m.i
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (m lenMatcher) String() string {
|
||||
return fmt.Sprintf("has length %d", m.i)
|
||||
}
|
||||
|
||||
type inAnyOrderMatcher struct {
|
||||
x any
|
||||
}
|
||||
|
||||
func (m inAnyOrderMatcher) Matches(x any) bool {
|
||||
given, ok := m.prepareValue(x)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
wanted, ok := m.prepareValue(m.x)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
if given.Len() != wanted.Len() {
|
||||
return false
|
||||
}
|
||||
|
||||
usedFromGiven := make([]bool, given.Len())
|
||||
foundFromWanted := make([]bool, wanted.Len())
|
||||
for i := 0; i < wanted.Len(); i++ {
|
||||
wantedMatcher := Eq(wanted.Index(i).Interface())
|
||||
for j := 0; j < given.Len(); j++ {
|
||||
if usedFromGiven[j] {
|
||||
continue
|
||||
}
|
||||
if wantedMatcher.Matches(given.Index(j).Interface()) {
|
||||
foundFromWanted[i] = true
|
||||
usedFromGiven[j] = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
missingFromWanted := 0
|
||||
for _, found := range foundFromWanted {
|
||||
if !found {
|
||||
missingFromWanted++
|
||||
}
|
||||
}
|
||||
extraInGiven := 0
|
||||
for _, used := range usedFromGiven {
|
||||
if !used {
|
||||
extraInGiven++
|
||||
}
|
||||
}
|
||||
|
||||
return extraInGiven == 0 && missingFromWanted == 0
|
||||
}
|
||||
|
||||
func (m inAnyOrderMatcher) prepareValue(x any) (reflect.Value, bool) {
|
||||
xValue := reflect.ValueOf(x)
|
||||
switch xValue.Kind() {
|
||||
case reflect.Slice, reflect.Array:
|
||||
return xValue, true
|
||||
default:
|
||||
return reflect.Value{}, false
|
||||
}
|
||||
}
|
||||
|
||||
func (m inAnyOrderMatcher) String() string {
|
||||
return fmt.Sprintf("has the same elements as %v", m.x)
|
||||
}
|
||||
|
||||
// Constructors
|
||||
|
||||
// All returns a composite Matcher that returns true if and only if all of the
|
||||
// matchers return true.
|
||||
func All(ms ...Matcher) Matcher { return allMatcher{ms} }
|
||||
|
||||
// Any returns a matcher that always matches.
|
||||
func Any() Matcher { return anyMatcher{} }
|
||||
|
||||
// Cond returns a matcher that matches when the given function returns true
|
||||
// after passing it the parameter to the mock function.
|
||||
// This is particularly useful in case you want to match over a field of a custom struct, or dynamic logic.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// Cond(func(x int){return x == 1}).Matches(1) // returns true
|
||||
// Cond(func(x int){return x == 2}).Matches(1) // returns false
|
||||
func Cond[T any](fn func(x T) bool) Matcher { return condMatcher[T]{fn} }
|
||||
|
||||
// AnyOf returns a composite Matcher that returns true if at least one of the
|
||||
// matchers returns true.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// AnyOf(1, 2, 3).Matches(2) // returns true
|
||||
// AnyOf(1, 2, 3).Matches(10) // returns false
|
||||
// AnyOf(Nil(), Len(2)).Matches(nil) // returns true
|
||||
// AnyOf(Nil(), Len(2)).Matches("hi") // returns true
|
||||
// AnyOf(Nil(), Len(2)).Matches("hello") // returns false
|
||||
func AnyOf(xs ...any) Matcher {
|
||||
ms := make([]Matcher, 0, len(xs))
|
||||
for _, x := range xs {
|
||||
if m, ok := x.(Matcher); ok {
|
||||
ms = append(ms, m)
|
||||
} else {
|
||||
ms = append(ms, Eq(x))
|
||||
}
|
||||
}
|
||||
return anyOfMatcher{ms}
|
||||
}
|
||||
|
||||
// Eq returns a matcher that matches on equality.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// Eq(5).Matches(5) // returns true
|
||||
// Eq(5).Matches(4) // returns false
|
||||
func Eq(x any) Matcher { return eqMatcher{x} }
|
||||
|
||||
// Len returns a matcher that matches on length. This matcher returns false if
|
||||
// it is compared to a type that is not an array, chan, map, slice, or string.
|
||||
func Len(i int) Matcher {
|
||||
return lenMatcher{i}
|
||||
}
|
||||
|
||||
// Nil returns a matcher that matches if the received value is nil.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// var x *bytes.Buffer
|
||||
// Nil().Matches(x) // returns true
|
||||
// x = &bytes.Buffer{}
|
||||
// Nil().Matches(x) // returns false
|
||||
func Nil() Matcher { return nilMatcher{} }
|
||||
|
||||
// Not reverses the results of its given child matcher.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// Not(Eq(5)).Matches(4) // returns true
|
||||
// Not(Eq(5)).Matches(5) // returns false
|
||||
func Not(x any) Matcher {
|
||||
if m, ok := x.(Matcher); ok {
|
||||
return notMatcher{m}
|
||||
}
|
||||
return notMatcher{Eq(x)}
|
||||
}
|
||||
|
||||
// Regex checks whether the parameter matches the associated regex.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// Regex("[0-9]{2}:[0-9]{2}").Matches("23:02") // returns true
|
||||
// Regex("[0-9]{2}:[0-9]{2}").Matches([]byte{'2', '3', ':', '0', '2'}) // returns true
|
||||
// Regex("[0-9]{2}:[0-9]{2}").Matches("hello world") // returns false
|
||||
// Regex("[0-9]{2}").Matches(21) // returns false as it's not a valid type
|
||||
func Regex(regexStr string) Matcher {
|
||||
return regexMatcher{regex: regexp.MustCompile(regexStr)}
|
||||
}
|
||||
|
||||
// AssignableToTypeOf is a Matcher that matches if the parameter to the mock
|
||||
// function is assignable to the type of the parameter to this function.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// var s fmt.Stringer = &bytes.Buffer{}
|
||||
// AssignableToTypeOf(s).Matches(time.Second) // returns true
|
||||
// AssignableToTypeOf(s).Matches(99) // returns false
|
||||
//
|
||||
// var ctx = reflect.TypeOf((*context.Context)(nil)).Elem()
|
||||
// AssignableToTypeOf(ctx).Matches(context.Background()) // returns true
|
||||
func AssignableToTypeOf(x any) Matcher {
|
||||
if xt, ok := x.(reflect.Type); ok {
|
||||
return assignableToTypeOfMatcher{xt}
|
||||
}
|
||||
return assignableToTypeOfMatcher{reflect.TypeOf(x)}
|
||||
}
|
||||
|
||||
// InAnyOrder is a Matcher that returns true for collections of the same elements ignoring the order.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// InAnyOrder([]int{1, 2, 3}).Matches([]int{1, 3, 2}) // returns true
|
||||
// InAnyOrder([]int{1, 2, 3}).Matches([]int{1, 2}) // returns false
|
||||
func InAnyOrder(x any) Matcher {
|
||||
return inAnyOrderMatcher{x}
|
||||
}
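matchers.go above defines the argument matchers used throughout the call-matching logic. A small hedged sketch of how they compose on an expectation, again written against the hypothetical MockStore from the earlier sketches (plain values are wrapped with Eq by the generated mock):

func expectWithMatchers(m *MockStore) {
	// Exact value: equivalent to gomock.Eq("exact").
	m.EXPECT().Get("exact").Return("1", nil)

	// AnyOf matches if any alternative does; Cond applies a typed predicate.
	m.EXPECT().Get(gomock.AnyOf("a", "b")).Return("2", nil)
	m.EXPECT().Get(gomock.Cond(func(k string) bool { return strings.HasPrefix(k, "tmp/") })).Return("3", nil)

	// Matchers negate and combine: not a purely numeric key, and length 4.
	m.EXPECT().Get(gomock.All(gomock.Not(gomock.Regex(`^[0-9]+$`)), gomock.Len(4))).Return("4", nil)
}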
36 vendor/go.uber.org/mock/gomock/string.go generated vendored
@@ -1,36 +0,0 @@
package gomock

import (
	"fmt"
	"reflect"
)

// getString is a safe way to convert a value to a string for printing results
// If the value is a mock, getString avoids calling the mocked String() method,
// which avoids potential deadlocks
func getString(x any) string {
	if isGeneratedMock(x) {
		return fmt.Sprintf("%T", x)
	}
	if s, ok := x.(fmt.Stringer); ok {
		return s.String()
	}
	return fmt.Sprintf("%v", x)
}

// isGeneratedMock checks if the given type has a "isgomock" field,
// indicating it is a generated mock.
func isGeneratedMock(x any) bool {
	typ := reflect.TypeOf(x)
	if typ == nil {
		return false
	}
	if typ.Kind() == reflect.Ptr {
		typ = typ.Elem()
	}
	if typ.Kind() != reflect.Struct {
		return false
	}
	_, isgomock := typ.FieldByName("isgomock")
	return isgomock
}
533 vendor/go.uber.org/mock/mockgen/model/model.go generated vendored
@@ -1,533 +0,0 @@
|
||||
// Copyright 2012 Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package model contains the data model necessary for generating mock implementations.
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/gob"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// pkgPath is the importable path for package model
|
||||
const pkgPath = "go.uber.org/mock/mockgen/model"
|
||||
|
||||
// Package is a Go package. It may be a subset.
|
||||
type Package struct {
|
||||
Name string
|
||||
PkgPath string
|
||||
Interfaces []*Interface
|
||||
DotImports []string
|
||||
}
|
||||
|
||||
// Print writes the package name and its exported interfaces.
|
||||
func (pkg *Package) Print(w io.Writer) {
|
||||
_, _ = fmt.Fprintf(w, "package %s\n", pkg.Name)
|
||||
for _, intf := range pkg.Interfaces {
|
||||
intf.Print(w)
|
||||
}
|
||||
}
|
||||
|
||||
// Imports returns the imports needed by the Package as a set of import paths.
|
||||
func (pkg *Package) Imports() map[string]bool {
|
||||
im := make(map[string]bool)
|
||||
for _, intf := range pkg.Interfaces {
|
||||
intf.addImports(im)
|
||||
for _, tp := range intf.TypeParams {
|
||||
tp.Type.addImports(im)
|
||||
}
|
||||
}
|
||||
return im
|
||||
}
|
||||
|
||||
// Interface is a Go interface.
|
||||
type Interface struct {
|
||||
Name string
|
||||
Methods []*Method
|
||||
TypeParams []*Parameter
|
||||
}
|
||||
|
||||
// Print writes the interface name and its methods.
|
||||
func (intf *Interface) Print(w io.Writer) {
|
||||
_, _ = fmt.Fprintf(w, "interface %s\n", intf.Name)
|
||||
for _, m := range intf.Methods {
|
||||
m.Print(w)
|
||||
}
|
||||
}
|
||||
|
||||
func (intf *Interface) addImports(im map[string]bool) {
|
||||
for _, m := range intf.Methods {
|
||||
m.addImports(im)
|
||||
}
|
||||
}
|
||||
|
||||
// AddMethod adds a new method, de-duplicating by method name.
|
||||
func (intf *Interface) AddMethod(m *Method) {
|
||||
for _, me := range intf.Methods {
|
||||
if me.Name == m.Name {
|
||||
return
|
||||
}
|
||||
}
|
||||
intf.Methods = append(intf.Methods, m)
|
||||
}
|
||||
|
||||
// Method is a single method of an interface.
|
||||
type Method struct {
|
||||
Name string
|
||||
In, Out []*Parameter
|
||||
Variadic *Parameter // may be nil
|
||||
}
|
||||
|
||||
// Print writes the method name and its signature.
|
||||
func (m *Method) Print(w io.Writer) {
|
||||
_, _ = fmt.Fprintf(w, " - method %s\n", m.Name)
|
||||
if len(m.In) > 0 {
|
||||
_, _ = fmt.Fprintf(w, " in:\n")
|
||||
for _, p := range m.In {
|
||||
p.Print(w)
|
||||
}
|
||||
}
|
||||
if m.Variadic != nil {
|
||||
_, _ = fmt.Fprintf(w, " ...:\n")
|
||||
m.Variadic.Print(w)
|
||||
}
|
||||
if len(m.Out) > 0 {
|
||||
_, _ = fmt.Fprintf(w, " out:\n")
|
||||
for _, p := range m.Out {
|
||||
p.Print(w)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Method) addImports(im map[string]bool) {
|
||||
for _, p := range m.In {
|
||||
p.Type.addImports(im)
|
||||
}
|
||||
if m.Variadic != nil {
|
||||
m.Variadic.Type.addImports(im)
|
||||
}
|
||||
for _, p := range m.Out {
|
||||
p.Type.addImports(im)
|
||||
}
|
||||
}
|
||||
|
||||
// Parameter is an argument or return parameter of a method.
|
||||
type Parameter struct {
|
||||
Name string // may be empty
|
||||
Type Type
|
||||
}
|
||||
|
||||
// Print writes a method parameter.
|
||||
func (p *Parameter) Print(w io.Writer) {
|
||||
n := p.Name
|
||||
if n == "" {
|
||||
n = `""`
|
||||
}
|
||||
_, _ = fmt.Fprintf(w, " - %v: %v\n", n, p.Type.String(nil, ""))
|
||||
}
|
||||
|
||||
// Type is a Go type.
|
||||
type Type interface {
|
||||
String(pm map[string]string, pkgOverride string) string
|
||||
addImports(im map[string]bool)
|
||||
}
|
||||
|
||||
func init() {
|
||||
// Call gob.RegisterName with pkgPath as prefix to avoid conflicting with
|
||||
// github.com/golang/mock/mockgen/model 's registration.
|
||||
gob.RegisterName(pkgPath+".ArrayType", &ArrayType{})
|
||||
gob.RegisterName(pkgPath+".ChanType", &ChanType{})
|
||||
gob.RegisterName(pkgPath+".FuncType", &FuncType{})
|
||||
gob.RegisterName(pkgPath+".MapType", &MapType{})
|
||||
gob.RegisterName(pkgPath+".NamedType", &NamedType{})
|
||||
gob.RegisterName(pkgPath+".PointerType", &PointerType{})
|
||||
|
||||
// Call gob.RegisterName to make sure it has the consistent name registered
|
||||
// for both gob decoder and encoder.
|
||||
//
|
||||
// For a non-pointer type, gob.Register will try to get package full path by
|
||||
// calling rt.PkgPath() for a name to register. If your project has vendor
|
||||
// directory, it is possible that PkgPath will get a path like this:
|
||||
// ../../../vendor/go.uber.org/mock/mockgen/model
|
||||
gob.RegisterName(pkgPath+".PredeclaredType", PredeclaredType(""))
|
||||
}
|
||||
|
||||
// ArrayType is an array or slice type.
|
||||
type ArrayType struct {
|
||||
Len int // -1 for slices, >= 0 for arrays
|
||||
Type Type
|
||||
}
|
||||
|
||||
func (at *ArrayType) String(pm map[string]string, pkgOverride string) string {
|
||||
s := "[]"
|
||||
if at.Len > -1 {
|
||||
s = fmt.Sprintf("[%d]", at.Len)
|
||||
}
|
||||
return s + at.Type.String(pm, pkgOverride)
|
||||
}
|
||||
|
||||
func (at *ArrayType) addImports(im map[string]bool) { at.Type.addImports(im) }
|
||||
|
||||
// ChanType is a channel type.
|
||||
type ChanType struct {
|
||||
Dir ChanDir // 0, 1 or 2
|
||||
Type Type
|
||||
}
|
||||
|
||||
func (ct *ChanType) String(pm map[string]string, pkgOverride string) string {
|
||||
s := ct.Type.String(pm, pkgOverride)
|
||||
if ct.Dir == RecvDir {
|
||||
return "<-chan " + s
|
||||
}
|
||||
if ct.Dir == SendDir {
|
||||
return "chan<- " + s
|
||||
}
|
||||
return "chan " + s
|
||||
}
|
||||
|
||||
func (ct *ChanType) addImports(im map[string]bool) { ct.Type.addImports(im) }
|
||||
|
||||
// ChanDir is a channel direction.
|
||||
type ChanDir int
|
||||
|
||||
// Constants for channel directions.
|
||||
const (
|
||||
RecvDir ChanDir = 1
|
||||
SendDir ChanDir = 2
|
||||
)
|
||||
|
||||
// FuncType is a function type.
|
||||
type FuncType struct {
|
||||
In, Out []*Parameter
|
||||
Variadic *Parameter // may be nil
|
||||
}
|
||||
|
||||
func (ft *FuncType) String(pm map[string]string, pkgOverride string) string {
|
||||
args := make([]string, len(ft.In))
|
||||
for i, p := range ft.In {
|
||||
args[i] = p.Type.String(pm, pkgOverride)
|
||||
}
|
||||
if ft.Variadic != nil {
|
||||
args = append(args, "..."+ft.Variadic.Type.String(pm, pkgOverride))
|
||||
}
|
||||
rets := make([]string, len(ft.Out))
|
||||
for i, p := range ft.Out {
|
||||
rets[i] = p.Type.String(pm, pkgOverride)
|
||||
}
|
||||
retString := strings.Join(rets, ", ")
|
||||
if nOut := len(ft.Out); nOut == 1 {
|
||||
retString = " " + retString
|
||||
} else if nOut > 1 {
|
||||
retString = " (" + retString + ")"
|
||||
}
|
||||
return "func(" + strings.Join(args, ", ") + ")" + retString
|
||||
}
|
||||
|
||||
func (ft *FuncType) addImports(im map[string]bool) {
|
||||
for _, p := range ft.In {
|
||||
p.Type.addImports(im)
|
||||
}
|
||||
if ft.Variadic != nil {
|
||||
ft.Variadic.Type.addImports(im)
|
||||
}
|
||||
for _, p := range ft.Out {
|
||||
p.Type.addImports(im)
|
||||
}
|
||||
}
|
||||
|
||||
// MapType is a map type.
|
||||
type MapType struct {
|
||||
Key, Value Type
|
||||
}
|
||||
|
||||
func (mt *MapType) String(pm map[string]string, pkgOverride string) string {
|
||||
return "map[" + mt.Key.String(pm, pkgOverride) + "]" + mt.Value.String(pm, pkgOverride)
|
||||
}
|
||||
|
||||
func (mt *MapType) addImports(im map[string]bool) {
|
||||
mt.Key.addImports(im)
|
||||
mt.Value.addImports(im)
|
||||
}
|
||||
|
||||
// NamedType is an exported type in a package.
|
||||
type NamedType struct {
|
||||
Package string // may be empty
|
||||
Type string
|
||||
TypeParams *TypeParametersType
|
||||
}
|
||||
|
||||
func (nt *NamedType) String(pm map[string]string, pkgOverride string) string {
|
||||
if pkgOverride == nt.Package {
|
||||
return nt.Type + nt.TypeParams.String(pm, pkgOverride)
|
||||
}
|
||||
prefix := pm[nt.Package]
|
||||
if prefix != "" {
|
||||
return prefix + "." + nt.Type + nt.TypeParams.String(pm, pkgOverride)
|
||||
}
|
||||
|
||||
return nt.Type + nt.TypeParams.String(pm, pkgOverride)
|
||||
}
|
||||
|
||||
func (nt *NamedType) addImports(im map[string]bool) {
|
||||
if nt.Package != "" {
|
||||
im[nt.Package] = true
|
||||
}
|
||||
nt.TypeParams.addImports(im)
|
||||
}
|
||||
|
||||
// PointerType is a pointer to another type.
|
||||
type PointerType struct {
|
||||
Type Type
|
||||
}
|
||||
|
||||
func (pt *PointerType) String(pm map[string]string, pkgOverride string) string {
|
||||
return "*" + pt.Type.String(pm, pkgOverride)
|
||||
}
|
||||
func (pt *PointerType) addImports(im map[string]bool) { pt.Type.addImports(im) }
|
||||
|
||||
// PredeclaredType is a predeclared type such as "int".
|
||||
type PredeclaredType string
|
||||
|
||||
func (pt PredeclaredType) String(map[string]string, string) string { return string(pt) }
|
||||
func (pt PredeclaredType) addImports(map[string]bool) {}
|
||||
|
||||
// TypeParametersType contains type parameters for a NamedType.
|
||||
type TypeParametersType struct {
|
||||
TypeParameters []Type
|
||||
}
|
||||
|
||||
func (tp *TypeParametersType) String(pm map[string]string, pkgOverride string) string {
|
||||
if tp == nil || len(tp.TypeParameters) == 0 {
|
||||
return ""
|
||||
}
|
||||
var sb strings.Builder
|
||||
sb.WriteString("[")
|
||||
for i, v := range tp.TypeParameters {
|
||||
if i != 0 {
|
||||
sb.WriteString(", ")
|
||||
}
|
||||
sb.WriteString(v.String(pm, pkgOverride))
|
||||
}
|
||||
sb.WriteString("]")
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (tp *TypeParametersType) addImports(im map[string]bool) {
|
||||
if tp == nil {
|
||||
return
|
||||
}
|
||||
for _, v := range tp.TypeParameters {
|
||||
v.addImports(im)
|
||||
}
|
||||
}
|
||||
|
||||
// The following code is intended to be called by the program generated by ../reflect.go.
|
||||
|
||||
// InterfaceFromInterfaceType returns a pointer to an interface for the
|
||||
// given reflection interface type.
|
||||
func InterfaceFromInterfaceType(it reflect.Type) (*Interface, error) {
|
||||
if it.Kind() != reflect.Interface {
|
||||
return nil, fmt.Errorf("%v is not an interface", it)
|
||||
}
|
||||
intf := &Interface{}
|
||||
|
||||
for i := 0; i < it.NumMethod(); i++ {
|
||||
mt := it.Method(i)
|
||||
// TODO: need to skip unexported methods? or just raise an error?
|
||||
m := &Method{
|
||||
Name: mt.Name,
|
||||
}
|
||||
|
||||
var err error
|
||||
m.In, m.Variadic, m.Out, err = funcArgsFromType(mt.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
intf.AddMethod(m)
|
||||
}
|
||||
|
||||
return intf, nil
|
||||
}
|
||||
|
||||
// t's Kind must be a reflect.Func.
|
||||
func funcArgsFromType(t reflect.Type) (in []*Parameter, variadic *Parameter, out []*Parameter, err error) {
|
||||
nin := t.NumIn()
|
||||
if t.IsVariadic() {
|
||||
nin--
|
||||
}
|
||||
var p *Parameter
|
||||
for i := 0; i < nin; i++ {
|
||||
p, err = parameterFromType(t.In(i))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
in = append(in, p)
|
||||
}
|
||||
if t.IsVariadic() {
|
||||
p, err = parameterFromType(t.In(nin).Elem())
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
variadic = p
|
||||
}
|
||||
for i := 0; i < t.NumOut(); i++ {
|
||||
p, err = parameterFromType(t.Out(i))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
out = append(out, p)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func parameterFromType(t reflect.Type) (*Parameter, error) {
|
||||
tt, err := typeFromType(t)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Parameter{Type: tt}, nil
|
||||
}
|
||||
|
||||
var errorType = reflect.TypeOf((*error)(nil)).Elem()
|
||||
|
||||
var byteType = reflect.TypeOf(byte(0))
|
||||
|
||||
func typeFromType(t reflect.Type) (Type, error) {
|
||||
// Hack workaround for https://golang.org/issue/3853.
|
||||
// This explicit check should not be necessary.
|
||||
if t == byteType {
|
||||
return PredeclaredType("byte"), nil
|
||||
}
|
||||
|
||||
if imp := t.PkgPath(); imp != "" {
|
||||
return &NamedType{
|
||||
Package: impPath(imp),
|
||||
Type: t.Name(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// only unnamed or predeclared types after here
|
||||
|
||||
// Lots of types have element types. Let's do the parsing and error checking for all of them.
|
||||
var elemType Type
|
||||
switch t.Kind() {
|
||||
case reflect.Array, reflect.Chan, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
var err error
|
||||
elemType, err = typeFromType(t.Elem())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
switch t.Kind() {
|
||||
case reflect.Array:
|
||||
return &ArrayType{
|
||||
Len: t.Len(),
|
||||
Type: elemType,
|
||||
}, nil
|
||||
case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
|
||||
reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128, reflect.String:
|
||||
return PredeclaredType(t.Kind().String()), nil
|
||||
case reflect.Chan:
|
||||
var dir ChanDir
|
||||
switch t.ChanDir() {
|
||||
case reflect.RecvDir:
|
||||
dir = RecvDir
|
||||
case reflect.SendDir:
|
||||
dir = SendDir
|
||||
}
|
||||
return &ChanType{
|
||||
Dir: dir,
|
||||
Type: elemType,
|
||||
}, nil
|
||||
case reflect.Func:
|
||||
in, variadic, out, err := funcArgsFromType(t)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &FuncType{
|
||||
In: in,
|
||||
Out: out,
|
||||
Variadic: variadic,
|
||||
}, nil
|
||||
case reflect.Interface:
|
||||
// Two special interfaces.
|
||||
if t.NumMethod() == 0 {
|
||||
return PredeclaredType("any"), nil
|
||||
}
|
||||
if t == errorType {
|
||||
return PredeclaredType("error"), nil
|
||||
}
|
||||
case reflect.Map:
|
||||
kt, err := typeFromType(t.Key())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &MapType{
|
||||
Key: kt,
|
||||
Value: elemType,
|
||||
}, nil
|
||||
case reflect.Ptr:
|
||||
return &PointerType{
|
||||
Type: elemType,
|
||||
}, nil
|
||||
case reflect.Slice:
|
||||
return &ArrayType{
|
||||
Len: -1,
|
||||
Type: elemType,
|
||||
}, nil
|
||||
case reflect.Struct:
|
||||
if t.NumField() == 0 {
|
||||
return PredeclaredType("struct{}"), nil
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Struct, UnsafePointer
|
||||
return nil, fmt.Errorf("can't yet turn %v (%v) into a model.Type", t, t.Kind())
|
||||
}
|
||||
|
||||
// impPath sanitizes the package path returned by `PkgPath` method of a reflect Type so that
|
||||
// it is importable. PkgPath might return a path that includes "vendor". These paths do not
|
||||
// compile, so we need to remove everything up to and including "/vendor/".
|
||||
// See https://github.com/golang/go/issues/12019.
|
||||
func impPath(imp string) string {
|
||||
if strings.HasPrefix(imp, "vendor/") {
|
||||
imp = "/" + imp
|
||||
}
|
||||
if i := strings.LastIndex(imp, "/vendor/"); i != -1 {
|
||||
imp = imp[i+len("/vendor/"):]
|
||||
}
|
||||
return imp
|
||||
}
|
||||
|
||||
// ErrorInterface represents the built-in error interface.
|
||||
var ErrorInterface = Interface{
|
||||
Name: "error",
|
||||
Methods: []*Method{
|
||||
{
|
||||
Name: "Error",
|
||||
Out: []*Parameter{
|
||||
{
|
||||
Name: "",
|
||||
Type: PredeclaredType("string"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,51 +1,33 @@
|
||||
Direct Workflow bot
|
||||
===================
|
||||
|
||||
The project submodule is automatically updated by the direct bot whenever a branch is updated in a package repository.
|
||||
This bot can coexist with the Workflow PR bot, which is instead triggered by a new package PR.
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Devel project, where direct pushes to package git are possible.
|
||||
|
||||
Areas of responsibility
|
||||
-----------------------
|
||||
|
||||
1. Keep ProjectGit in sync with packages in the organization
|
||||
* **On pushes to package**: updates the submodule commit ID to the default branch HEAD (as configured in Gitea).
|
||||
* **On repository adds**: creates a new submodule (if non-empty).
|
||||
* **On repository removal**: removes the submodule.
|
||||
|
||||
**Note:** If you want to revert a change in a package, you need to do that manually in the project git.
|
||||
* on pushes to package, updates the submodule commit id
|
||||
to the default branch HEAD (as configured in Gitea)
|
||||
* on repository adds, creates a new submodule (if non empty)
|
||||
* on repository removal, removes the submodule
|
||||
|
||||
NOTE: reverts (push HEAD^) are not supported as they would step-on the
|
||||
work of the workflow-pr bot. Manual update of the project git is
|
||||
required in this case.
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
Uses `workflow.config` for configuration.
|
||||
Uses `workflow.config` for configuration. Parameters:
|
||||
|
||||
| Field name | Details | Mandatory | Type | Allowed Values | Default |
|
||||
| ----- | ----- | ----- | ----- | ----- | ----- |
|
||||
| *Workflows* | Type of workflow | yes | string | “direct” | |
|
||||
| *Organization* | The organization that holds all the packages | yes | string | | |
|
||||
| *Branch* | The designated branch for packages | no | string | | blank (default package branch) |
|
||||
| *GitProjectName* | Repository and branch where the ProjectGit lives. | no | string | **Format**: `org/project_repo#branch` | By default assumes `_ObsPrj` with default branch in the *Organization* |
|
||||
* _Workflows_: ["direct"] -- direct entry enables direct workflow. **Mandatory**
|
||||
* _Organization_: organization that holds all the packages. **Mandatory**
|
||||
* _Branch_: branch updated in repo's, or blank for default package branch
|
||||
* _GitProjectName_: package in above org, or `org/package#branch` for PrjGit. By default assumes `_ObsPrj` with default branch and in the `Organization`
|
||||
|
||||
NOTE: `-rm`, `-removed`, and `-deleted` are all "removed" suffixes used to indicate that the current branch is a placeholder for a previously existing package. Such branches are ignored by the bot; if one is the default branch, the package is removed and will not be added to the project.
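
For illustration, a minimal `workflow.config` entry enabling the direct workflow might look like the sketch below; the organization, branch, and ProjectGit names are placeholders, not real repositories.

```
[
  {
    "Workflows": ["direct"],
    "Organization": "example-devel-org",
    "Branch": "factory",
    "GitProjectName": "example-devel-org/_ObsPrj#factory"
  }
]
```

Omitting *Branch* falls back to each package's default branch, and omitting *GitProjectName* falls back to `_ObsPrj` with its default branch in the same organization, as described in the table above.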
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Environment Variables
|
||||
-------
|
||||
|
||||
* `GITEA_TOKEN` (required)
|
||||
* `AMQP_USERNAME` (required)
|
||||
* `AMQP_PASSWORD` (required)
|
||||
* `AUTOGITS_CONFIG` (required)
|
||||
* `AUTOGITS_URL` - default: https://src.opensuse.org
|
||||
* `AUTOGITS_RABBITURL` - default: amqps://rabbit.opensuse.org
|
||||
* `AUTOGITS_DEBUG` - disabled by default, set to any value to enable
|
||||
* `AUTOGITS_CHECK_ON_START` - disabled by default, set to any value to enable
|
||||
* `AUTOGITS_REPO_PATH` - default is temporary directory
|
||||
* `AUTOGITS_IDENTITY_FILE` - explicit SSH identity file path, for cases where one needs to be specified
|
||||
Devel project, where direct pushes to package git are possible
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"os"
|
||||
@@ -39,7 +40,7 @@ import (
|
||||
const (
|
||||
AppName = "direct_workflow"
|
||||
GitAuthor = "AutoGits prjgit-updater"
|
||||
GitEmail = "autogits-direct@noreply@src.opensuse.org"
|
||||
GitEmail = "adam+autogits-direct@zombino.com"
|
||||
)
|
||||
|
||||
var configuredRepos map[string][]*common.AutogitConfig
|
||||
@@ -52,6 +53,18 @@ func isConfiguredOrg(org *common.Organization) bool {
|
||||
return found
|
||||
}
|
||||
|
||||
func concatenateErrors(err1, err2 error) error {
|
||||
if err1 == nil {
|
||||
return err2
|
||||
}
|
||||
|
||||
if err2 == nil {
|
||||
return err1
|
||||
}
|
||||
|
||||
return fmt.Errorf("%w\n%w", err1, err2)
|
||||
}
|
||||
|
||||
type RepositoryActionProcessor struct{}
|
||||
|
||||
func (*RepositoryActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
@@ -59,90 +72,69 @@ func (*RepositoryActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
configs, configFound := configuredRepos[action.Organization.Username]
|
||||
|
||||
if !configFound {
|
||||
common.LogInfo("Repository event for", action.Organization.Username, ". Not configured. Ignoring.", action.Organization.Username)
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Organization.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if org, repo, _ := config.GetPrjGit(); org == action.Repository.Owner.Username && repo == action.Repository.Name {
|
||||
common.LogError("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
log.Println("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
processConfiguredRepositoryAction(action, config)
|
||||
err = concatenateErrors(err, processConfiguredRepositoryAction(action, config))
|
||||
}
|
||||
|
||||
return nil
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, config *common.AutogitConfig) {
|
||||
func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, config *common.AutogitConfig) error {
|
||||
gitOrg, gitPrj, gitBranch := config.GetPrjGit()
|
||||
git, err := gh.CreateGitHandler(config.Organization)
|
||||
common.PanicOnError(err)
|
||||
defer git.Close()
|
||||
|
||||
configBranch := config.Branch
|
||||
if len(configBranch) == 0 {
|
||||
configBranch = action.Repository.Default_Branch
|
||||
if common.IsRemovedBranch(configBranch) {
|
||||
common.LogDebug(" - default branch has deleted suffix. Skipping")
|
||||
return
|
||||
}
|
||||
|
||||
if len(configBranch) == 0 {
|
||||
common.LogDebug("Empty default branch in message. Maybe race-condition?")
|
||||
repo, err := gitea.GetRepository(action.Repository.Owner.Username, action.Repository.Name)
|
||||
if err != nil {
|
||||
common.LogError("Failed to fetch repository we have an event for?", action.Repository.Owner.Username, action.Repository.Name)
|
||||
return
|
||||
}
|
||||
|
||||
if len(repo.DefaultBranch) == 0 {
|
||||
common.LogError("Default branch is somehow empty. We cannot do anything.")
|
||||
return
|
||||
}
|
||||
configBranch = repo.DefaultBranch
|
||||
}
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, gitOrg, gitPrj)
|
||||
if err != nil {
|
||||
common.LogError("Error accessing/creating prjgit:", gitOrg, gitPrj, gitBranch, err)
|
||||
return
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s/%s#%s err: %w", gitOrg, gitPrj, gitBranch, err)
|
||||
}
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, prjGitRepo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
|
||||
switch action.Action {
|
||||
case "created":
|
||||
if action.Repository.Object_Format_Name != "sha256" {
|
||||
common.LogError(" - ", action.Repository.Name, "repo is not sha256. Ignoring.")
|
||||
return
|
||||
return fmt.Errorf(" - '%s' repo is not sha256. Ignoring.", action.Repository.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, action.Repository.Name), "branch", "--show-current"))
|
||||
if branch != configBranch {
|
||||
if err := git.GitExec(path.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "origin", configBranch+":"+configBranch); err != nil {
|
||||
common.LogError("error fetching branch", configBranch, ". ignoring as non-existent.", err) // no branch? so ignore repo here
|
||||
return
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, action.Repository.Name), "checkout", configBranch))
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, action.Repository.Name), "checkout", config.Branch))
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Auto-inclusion "+action.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Automatic package inclusion via Direct Workflow"))
|
||||
if !noop {
|
||||
common.PanicOnError(git.GitExec(gitPrj, "push"))
|
||||
}
|
||||
|
||||
case "deleted":
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
common.LogDebug("delete event for", action.Repository.Name, "-- not in project. Ignoring")
|
||||
return
|
||||
if DebugMode {
|
||||
log.Println("delete event for", action.Repository.Name, "-- not in project. Ignoring")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "rm", action.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Automatic package removal via Direct Workflow"))
|
||||
@@ -151,9 +143,10 @@ func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, co
|
||||
}
|
||||
|
||||
default:
|
||||
common.LogError("Unknown action type:", action.Action)
|
||||
return
|
||||
return fmt.Errorf("%s: %s", "Unknown action type", action.Action)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type PushActionProcessor struct{}
|
||||
@@ -163,83 +156,77 @@ func (*PushActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
configs, configFound := configuredRepos[action.Repository.Owner.Username]
|
||||
|
||||
if !configFound {
|
||||
common.LogDebug("Repository event for", action.Repository.Owner.Username, ". Not configured. Ignoring.")
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Repository.Owner.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if gitOrg, gitPrj, _ := config.GetPrjGit(); gitOrg == action.Repository.Owner.Username && gitPrj == action.Repository.Name {
|
||||
common.LogInfo("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
log.Println("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
processConfiguredPushAction(action, config)
|
||||
err = concatenateErrors(err, processConfiguredPushAction(action, config))
|
||||
}
|
||||
return nil
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredPushAction(action *common.PushWebhookEvent, config *common.AutogitConfig) {
|
||||
func processConfiguredPushAction(action *common.PushWebhookEvent, config *common.AutogitConfig) error {
|
||||
gitOrg, gitPrj, gitBranch := config.GetPrjGit()
|
||||
git, err := gh.CreateGitHandler(config.Organization)
|
||||
common.PanicOnError(err)
|
||||
defer git.Close()
|
||||
|
||||
common.LogDebug("push to:", action.Repository.Owner.Username, action.Repository.Name, "for:", gitOrg, gitPrj, gitBranch)
|
||||
branch := config.Branch
|
||||
if len(branch) == 0 {
|
||||
if common.IsRemovedBranch(branch) {
|
||||
common.LogDebug(" + default branch has removed suffix:", branch, "Skipping.")
|
||||
return
|
||||
}
|
||||
branch = action.Repository.Default_Branch
|
||||
common.LogDebug(" + using default branch", branch)
|
||||
log.Printf("push to: %s/%s for %s/%s#%s", action.Repository.Owner.Username, action.Repository.Name, gitOrg, gitPrj, gitBranch)
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
log.Println(" + default branch", action.Repository.Default_Branch)
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, gitOrg, gitPrj)
|
||||
if err != nil {
|
||||
common.LogError("Error accessing/creating prjgit:", gitOrg, gitPrj, err)
|
||||
return
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s/%s err: %w", gitOrg, gitPrj, err)
|
||||
}
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, prjGitRepo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
headCommitId, err := git.GitRemoteHead(gitPrj, remoteName, gitBranch)
|
||||
common.PanicOnError(err)
|
||||
commit, ok := git.GitSubmoduleCommitId(gitPrj, action.Repository.Name, headCommitId)
|
||||
for ok && action.Head_Commit.Id == commit {
|
||||
common.LogDebug(" -- nothing to do, commit already in ProjectGit")
|
||||
return
|
||||
log.Println(" -- nothing to do, commit already in ProjectGit")
|
||||
return nil
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil {
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name)
|
||||
common.LogDebug("Pushed to package that is not part of the project. Re-adding...", err)
|
||||
} else if !stat.IsDir() {
|
||||
common.LogError("Pushed to a package that is not a submodule but exists in the project. Ignoring.")
|
||||
return
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if DebugMode {
|
||||
log.Println("Pushed to package that is not part of the project. Ignoring:", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--force", "--depth", "1", "--checkout", action.Repository.Name)
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--depth", "1", "--checkout", action.Repository.Name)
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
if err := git.GitExec(filepath.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "--force", "origin", branch+":"+branch); err != nil {
|
||||
common.LogError("Error fetching branch:", branch, "Ignoring as non-existent.", err)
|
||||
return
|
||||
if err := git.GitExec(filepath.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "--force", remoteName, config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
id, err := git.GitBranchHead(filepath.Join(gitPrj, action.Repository.Name), branch)
|
||||
id, err := git.GitRemoteHead(filepath.Join(gitPrj, action.Repository.Name), remoteName, config.Branch)
|
||||
common.PanicOnError(err)
|
||||
if action.Head_Commit.Id == id {
|
||||
git.GitExecOrPanic(filepath.Join(gitPrj, action.Repository.Name), "checkout", id)
|
||||
git.GitExecOrPanic(gitPrj, "commit", "-a", "-m", fmt.Sprintf("'%s' update via Direct Workflow", action.Repository.Name))
|
||||
git.GitExecOrPanic(gitPrj, "commit", "-a", "-m", "Automatic update via push via Direct Workflow")
|
||||
if !noop {
|
||||
git.GitExecOrPanic(gitPrj, "push", remoteName)
|
||||
}
|
||||
return
|
||||
return nil
|
||||
}
|
||||
|
||||
common.LogDebug("push of refs not on the configured branch", branch, ". ignoring.")
|
||||
log.Println("push of refs not on the configured branch", config.Branch, ". ignoring.")
|
||||
return nil
|
||||
}
|
||||
|
||||
func verifyProjectState(git common.Git, org string, config *common.AutogitConfig, configs []*common.AutogitConfig) (err error) {
|
||||
@@ -261,65 +248,51 @@ func verifyProjectState(git common.Git, org string, config *common.AutogitConfig
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, repo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
common.LogDebug(" * Getting submodule list")
|
||||
log.Println(" * Getting submodule list")
|
||||
sub, err := git.GitSubmoduleList(gitPrj, "HEAD")
|
||||
common.PanicOnError(err)
|
||||
|
||||
common.LogDebug(" * Getting package links")
|
||||
log.Println(" * Getting package links")
|
||||
var pkgLinks []*PackageRebaseLink
|
||||
if f, err := fs.Stat(os.DirFS(path.Join(git.GetPath(), gitPrj)), common.PrjLinksFile); err == nil && (f.Mode()&fs.ModeType == 0) && f.Size() < 1000000 {
|
||||
if data, err := os.ReadFile(path.Join(git.GetPath(), gitPrj, common.PrjLinksFile)); err == nil {
|
||||
pkgLinks, err = parseProjectLinks(data)
|
||||
if err != nil {
|
||||
common.LogError("Cannot parse project links file:", err.Error())
|
||||
log.Println("Cannot parse project links file:", err.Error())
|
||||
pkgLinks = nil
|
||||
} else {
|
||||
ResolveLinks(org, pkgLinks, gitea)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
common.LogInfo(" - No package links defined")
|
||||
log.Println(" - No package links defined")
|
||||
}
|
||||
|
||||
/* Check existing submodule that they are updated */
|
||||
|
||||
isGitUpdated := false
|
||||
next_package:
|
||||
for filename, commitId := range sub {
|
||||
// ignore project gits
|
||||
//for _, c := range configs {
|
||||
if gitPrj == filename {
|
||||
common.LogDebug(" prjgit as package? ignoring project git:", filename)
|
||||
log.Println(" prjgit as package? ignoring project git:", filename)
|
||||
continue next_package
|
||||
}
|
||||
//}
|
||||
|
||||
branch := config.Branch
|
||||
common.LogDebug(" verifying package:", commitId, "->", filename, "@", branch)
|
||||
if repo, err := gitea.GetRepository(org, filename); repo == nil && err == nil {
|
||||
common.LogDebug(" repository removed...")
|
||||
git.GitExecOrPanic(gitPrj, "rm", filename)
|
||||
isGitUpdated = true
|
||||
continue
|
||||
} else if err != nil {
|
||||
common.LogError("failed fetching repo data", org, filename, err)
|
||||
continue
|
||||
} else if len(branch) == 0 {
|
||||
branch = repo.DefaultBranch
|
||||
common.LogDebug(" -> using default branch", branch)
|
||||
if common.IsRemovedBranch(branch) {
|
||||
common.LogDebug(" Default branch for", filename, "is excluded")
|
||||
log.Printf(" verifying package: %s -> %s(%s)", commitId, filename, config.Branch)
|
||||
commits, err := gitea.GetRecentCommits(org, filename, config.Branch, 10)
|
||||
if len(commits) == 0 {
|
||||
if repo, err := gitea.GetRepository(org, filename); repo == nil && err == nil {
|
||||
git.GitExecOrPanic(gitPrj, "rm", filename)
|
||||
isGitUpdated = true
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
commits, err := gitea.GetRecentCommits(org, filename, branch, 10)
|
||||
if err != nil {
|
||||
common.LogDebug(" -> failed to fetch recent commits for package:", filename, " Err:", err)
|
||||
log.Println(" -> failed to fetch recent commits for package:", filename, " Err:", err)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -336,7 +309,7 @@ next_package:
|
||||
if l.Pkg == filename {
|
||||
link = l
|
||||
|
||||
common.LogDebug(" -> linked package")
|
||||
log.Println(" -> linked package")
|
||||
// so, we need to rebase here. Can't really optimize, so clone entire package tree and remote
|
||||
pkgPath := path.Join(gitPrj, filename)
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--checkout", filename)
|
||||
@@ -350,7 +323,7 @@ next_package:
|
||||
nCommits := len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgPath, "rev-list", "^NOW", "HEAD"), "\n"))
|
||||
if nCommits > 0 {
|
||||
if !noop {
|
||||
git.GitExecOrPanic(pkgPath, "push", "-f", "origin", "HEAD:"+branch)
|
||||
git.GitExecOrPanic(pkgPath, "push", "-f", "origin", "HEAD:"+config.Branch)
|
||||
}
|
||||
isGitUpdated = true
|
||||
}
|
||||
@@ -367,27 +340,42 @@ next_package:
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "update", "--init", "--depth", "1", "--checkout", filename))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(gitPrj, filename), "fetch", "--depth", "1", "origin", commits[0].SHA))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(gitPrj, filename), "checkout", commits[0].SHA))
|
||||
common.LogDebug(" -> updated to", commits[0].SHA)
|
||||
log.Println(" -> updated to", commits[0].SHA)
|
||||
isGitUpdated = true
|
||||
} else {
|
||||
// probably need `merge-base` or `rev-list` here instead, or the project updated already
|
||||
common.LogInfo(" *** Cannot find SHA of last matching update for package:", filename, " Ignoring")
|
||||
log.Println(" *** Cannot find SHA of last matching update for package:", filename, " Ignoring")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// find all missing repositories, and add them
|
||||
common.LogDebug("checking for missing repositories...")
|
||||
if DebugMode {
|
||||
log.Println("checking for missing repositories...")
|
||||
}
|
||||
repos, err := gitea.GetOrganizationRepositories(org)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
common.LogDebug(" nRepos:", len(repos))
|
||||
if DebugMode {
|
||||
log.Println(" nRepos:", len(repos))
|
||||
}
|
||||
|
||||
/* Check repositories in org to make sure they are included in project git */
|
||||
next_repo:
|
||||
for _, r := range repos {
|
||||
if DebugMode {
|
||||
log.Println(" -- checking", r.Name)
|
||||
}
|
||||
|
||||
if r.ObjectFormatName != "sha256" {
|
||||
if DebugMode {
|
||||
log.Println(" + ", r.ObjectFormatName, ". Needs to be sha256. Ignoring")
|
||||
}
|
||||
continue next_repo
|
||||
}
|
||||
|
||||
// for _, c := range configs {
|
||||
if gitPrj == r.Name {
|
||||
// ignore project gits
|
||||
@@ -402,45 +390,43 @@ next_repo:
|
||||
}
|
||||
}
|
||||
|
||||
common.LogDebug(" -- checking repository:", r.Name)
|
||||
|
||||
branch := config.Branch
|
||||
if len(branch) == 0 {
|
||||
branch = r.DefaultBranch
|
||||
if common.IsRemovedBranch(branch) {
|
||||
continue
|
||||
}
|
||||
if DebugMode {
|
||||
log.Println(" -- checking repository:", r.Name)
|
||||
}
|
||||
if commits, err := gitea.GetRecentCommits(org, r.Name, branch, 1); err != nil || len(commits) == 0 {
|
||||
|
||||
if _, err := gitea.GetRecentCommits(org, r.Name, config.Branch, 1); err != nil {
|
||||
// assumption that package does not exist, so not part of project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
|
||||
// or, we do not have commits here
|
||||
continue
|
||||
}
|
||||
|
||||
// add repository to git project
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", r.CloneURL, r.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--depth", "1", r.CloneURL, r.Name))
|
||||
|
||||
curBranch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, r.Name), "branch", "--show-current"))
|
||||
if branch != curBranch {
|
||||
if err := git.GitExec(path.Join(gitPrj, r.Name), "fetch", "--depth", "1", "origin", branch+":"+branch); err != nil {
|
||||
return fmt.Errorf("Fetching branch %s for %s/%s failed. Ignoring.", branch, repo.Owner.UserName, r.Name)
|
||||
if len(config.Branch) > 0 {
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, r.Name), "branch", "--show-current"))
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(gitPrj, r.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("Fetching branch %s for %s/%s failed. Ignoring.", config.Branch, repo.Owner.UserName, r.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, r.Name), "checkout", config.Branch))
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, r.Name), "checkout", branch))
|
||||
}
|
||||
|
||||
isGitUpdated = true
|
||||
}
|
||||
|
||||
if isGitUpdated {
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-a", "-m", "Periodic SYNC in Direct Workflow"))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-a", "-m", "Automatic update via push via Direct Workflow -- SYNC"))
|
||||
if !noop {
|
||||
git.GitExecOrPanic(gitPrj, "push", remoteName)
|
||||
}
|
||||
}
|
||||
|
||||
common.LogInfo("Verification finished for ", org, ", prjgit:", config.GitProjectName)
|
||||
if DebugMode {
|
||||
log.Println("Verification finished for ", org, ", prjgit:", config.GitProjectName)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -451,17 +437,17 @@ var checkInterval time.Duration
|
||||
func checkOrg(org string, configs []*common.AutogitConfig) {
|
||||
git, err := gh.CreateGitHandler(org)
|
||||
if err != nil {
|
||||
common.LogError("Failed to allocate GitHandler:", err)
|
||||
log.Println("Faield to allocate GitHandler:", err)
|
||||
return
|
||||
}
|
||||
defer git.Close()
|
||||
|
||||
for _, config := range configs {
|
||||
common.LogInfo(" ++ starting verification, org:", org, "config:", config.GitProjectName)
|
||||
log.Printf(" ++ starting verification, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
if err := verifyProjectState(git, org, config, configs); err != nil {
|
||||
common.LogError(" *** verification failed, org:", org, err)
|
||||
log.Printf(" *** verification failed, org: `%s`, err: %#v\n", org, err)
|
||||
} else {
|
||||
common.LogError(" ++ verification complete, org:", org, config.GitProjectName)
|
||||
log.Printf(" ++ verification complete, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -470,7 +456,7 @@ func checkRepos() {
|
||||
for org, configs := range configuredRepos {
|
||||
if checkInterval > 0 {
|
||||
sleepInterval := checkInterval - checkInterval/2 + time.Duration(rand.Int63n(int64(checkInterval)))
|
||||
common.LogInfo(" - sleep interval", sleepInterval, "until next check")
|
||||
log.Println(" - sleep interval", sleepInterval, "until next check")
|
||||
time.Sleep(sleepInterval)
|
||||
}
|
||||
|
||||
@@ -482,9 +468,9 @@ func consistencyCheckProcess() {
|
||||
if checkOnStart {
|
||||
savedCheckInterval := checkInterval
|
||||
checkInterval = 0
|
||||
common.LogInfo("== Startup consistency check begin...")
|
||||
log.Println("== Startup consistency check begin...")
|
||||
checkRepos()
|
||||
common.LogInfo("== Startup consistency check done...")
|
||||
log.Println("== Startup consistency check done...")
|
||||
checkInterval = savedCheckInterval
|
||||
}
|
||||
|
||||
@@ -499,8 +485,7 @@ var gh common.GitHandlerGenerator
|
||||
func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
configFile, err := common.ReadConfigFile(configFilename)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(4)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
configs, _ := common.ResolveWorkflowConfigs(gitea, configFile)
|
||||
@@ -508,7 +493,9 @@ func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
*orgs = make([]string, 0, 1)
|
||||
for _, c := range configs {
|
||||
if slices.Contains(c.Workflows, "direct") {
|
||||
common.LogDebug(" + adding org:", c.Organization, ", branch:", c.Branch, ", prjgit:", c.GitProjectName)
|
||||
if DebugMode {
|
||||
log.Printf(" + adding org: '%s', branch: '%s', prjgit: '%s'\n", c.Organization, c.Branch, c.GitProjectName)
|
||||
}
|
||||
configs := configuredRepos[c.Organization]
|
||||
if configs == nil {
|
||||
configs = make([]*common.AutogitConfig, 0, 1)
|
||||
@@ -522,7 +509,7 @@ func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
}
|
||||
|
||||
func main() {
|
||||
configFilename := flag.String("config", "config.json", "List of PrjGit")
|
||||
configFilename := flag.String("config", "", "List of PrjGit")
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
flag.BoolVar(&DebugMode, "debug", false, "Extra debugging information")
|
||||
@@ -533,35 +520,10 @@ func main() {
|
||||
flag.Parse()
|
||||
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if cf := os.Getenv("AUTOGITS_CONFIG"); len(cf) > 0 {
|
||||
*configFilename = cf
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_URL"); len(url) > 0 {
|
||||
*giteaUrl = url
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_RABBITURL"); len(url) > 0 {
|
||||
*rabbitUrl = url
|
||||
}
|
||||
if debug := os.Getenv("AUTOGITS_DEBUG"); len(debug) > 0 {
|
||||
DebugMode = true
|
||||
}
|
||||
if check := os.Getenv("AUTOGITS_CHECK_ON_START"); len(check) > 0 {
|
||||
checkOnStart = true
|
||||
}
|
||||
if p := os.Getenv("AUTOGITS_REPO_PATH"); len(p) > 0 {
|
||||
*basePath = p
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
defs := &common.RabbitMQGiteaEventsProcessor{}
|
||||
@@ -570,14 +532,12 @@ func main() {
|
||||
if len(*basePath) == 0 {
|
||||
*basePath, err = os.MkdirTemp(os.TempDir(), AppName)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
gh, err = common.AllocateGitWorkTree(*basePath, GitAuthor, GitEmail)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// handle reconfiguration
|
||||
@@ -592,10 +552,10 @@ func main() {
|
||||
}
|
||||
|
||||
if sig != syscall.SIGHUP {
|
||||
common.LogError("Unexpected signal received:", sig)
|
||||
log.Println("Unexpected signal received:", sig)
|
||||
continue
|
||||
}
|
||||
common.LogError("*** Reconfiguring ***")
|
||||
log.Println("*** Reconfiguring ***")
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
defs.Connection().UpdateTopics(defs)
|
||||
}
|
||||
@@ -607,25 +567,23 @@ func main() {
|
||||
gitea = common.AllocateGiteaTransport(*giteaUrl)
|
||||
CurrentUser, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
common.LogError("Cannot fetch current user:", err)
|
||||
os.Exit(2)
|
||||
log.Fatalln("Cannot fetch current user:", err)
|
||||
}
|
||||
common.LogInfo("Current User:", CurrentUser.UserName)
|
||||
log.Println("Current User:", CurrentUser.UserName)
|
||||
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
|
||||
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
|
||||
if err != nil {
|
||||
common.LogError("cannot parse server URL. Err:", err)
|
||||
os.Exit(3)
|
||||
log.Panicf("cannot parse server URL. Err: %#v\n", err)
|
||||
}
|
||||
|
||||
go consistencyCheckProcess()
|
||||
common.LogInfo("defs:", *defs)
|
||||
log.Println("defs:", *defs)
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
|
||||
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
|
||||
|
||||
common.LogError(common.ProcessRabbitMQEvents(defs))
|
||||
log.Fatal(common.ProcessRabbitMQEvents(defs))
|
||||
}
|
||||
|
||||
@@ -1,65 +1,54 @@
|
||||
Workflow-PR bot
|
||||
===============
|
||||
|
||||
Keeps ProjectGit PRs in sync with the related PackageGit PRs.
|
||||
Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
|
||||
|
||||
Areas of Responsibility
|
||||
-----------------------
|
||||
|
||||
* Detects when a PackageGit PR is created against a package and creates a corresponding PR against the ProjectGit
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated
* Stores a reference to the PackageGit PR in the headers of the ProjectGit PR comments, for later reference
  * this allows the ProjectGit PR to be merged separately later (via another tool, for example)
* Initiates all staging workflows via review requests
|
||||
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Any project (devel, codestream, product, etc.) that accepts PRs.
|
||||
Any project (devel, etc) that accepts PR
|
||||
|
||||
|
||||
Main Tasks
|
||||
----------
|
||||
|
||||
* **Synchronization**:
|
||||
* When a **PackageGit PR** is created for a package on a specific project branch, a corresponding PR is automatically generated in **ProjectGit**.
|
||||
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated.
|
||||
* A link to the PackageGit PR is stored in the body of the ProjectGit PR comments in the following format:
|
||||
* `PR: organization/package_name!pull_request_number`
|
||||
* Example: `PR: pool/curl!4`
|
||||
* It closes an empty ProjectGit PR (e.g., if a PR was initially created for a single package but later integrated into a larger ProjectGit PR).
|
||||
* It forwards the Work In Progress (WIP) flag to the ProjectGit PR. If the ProjectGit PR references multiple Package PRs, a single WIP package PR is enough to set the WIP flag on the ProjectGit PR.
|
||||
|
||||
* **Reviewer Management**:
|
||||
* It adds required reviewers in the ProjectGit PR.
|
||||
* It adds required reviewers in the PackageGit PR.
|
||||
* If new commits are added to a PackageGit PR, reviewers who have already approved it will be re-added.
|
||||
|
||||
* **Merge Management**:
|
||||
* Manages PR merges based on configuration flags (`ManualMergeOnly`, `ManualMergeProject`).
|
||||
* In general, merge only happens if all mandatory reviews are completed.
|
||||
* **ManualMergeProject** is stricter than **ManualMergeOnly** and has higher priority.
|
||||
|
||||
| Flag | Value | Behavior |
|
||||
| ----- | ----- | ----- |
|
||||
| ManualMergeProject | true | Both ProjectGit and PackageGit PRs are merged upon an allowed project maintainer commenting "merge ok” in the ProjectGit PR. |
|
||||
| ManualMergeOnly | true | Both PackageGit PR and ProjectGit PR are merged upon an allowed package maintainer or project maintainer commenting “merge ok” in the PackageGit PR. |
|
||||
| ManualMergeOnly and ManualMergeProject | false | Both ProjectGit and PackageGit PRs are merged as soon as all reviews are completed in both PrjGit and PkgGit PRs. |
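
As a sketch only (the organization and branch names are placeholders), a project that wants merges gated on a project maintainer's "merge ok" comment in the ProjectGit PR would set the flag in its `workflow.config` entry like this:

```
[
  {
    "Workflows": ["pr"],
    "Organization": "example-org",
    "Branch": "main",
    "ManualMergeProject": true
  }
]
```

Leaving both flags at their default `false` keeps the fully automatic behavior described in the last row of the table.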
|
||||
|
||||
Config file
|
||||
-----------
|
||||
JSON
|
||||
* _Workflows_: ["pr"] -- pr entry enables pr workflow. **Mandatory**
|
||||
* _Organization_: organization that holds all the packages **Mandatory**
|
||||
* _Branch_: branch updated in repo's **Mandatory**
|
||||
* _GitProjectName_: package in above org, or `org/package#branch` for PrjGit. By default assumes `_ObsPrj` with default branch and in the `Organization`
|
||||
* _Reviewers_: accounts associated with mandatory reviews for PrjGit. Can trigger additional
|
||||
review requests for PrjGit or associated PkgGit repos. Only when all reviews are
|
||||
satisfied, will the PrjGit PR be merged. See Reviewers below.
|
||||
* _ManualMergeOnly_: (true, false) only merge if "merge ok" comment/review by package or project maintainers or reviewers
|
||||
* _ManualMergeProject_: (true, false) only merge if "merge ok" by project maintainers or reviewers
|
||||
* _ReviewRequired_: (true, false) ignores that submitter is a maintainer and require a review from other maintainer IFF available
|
||||
* _NoProjectGitPR_: (true, false) do not create PrjGit PRs, but still process reviews, etc.
|
||||
* _Permissions_: permissions and associated accounts/groups. See below.
|
||||
|
||||
* Filename: `workflow.config`
|
||||
* Location: ProjectGit
|
||||
* Format: non-standard JSON (comments allowed)
|
||||
|
||||
| Field name | Details | Mandatory | Type | Allowed Values | Default |
|
||||
| ----- | ----- | ----- | ----- | ----- | ----- |
|
||||
| *Workflows* | Type of workflow | yes | string | “pr” | |
|
||||
| *Organization* | The organization where PackageGit PRs are expected to occur | yes | string | | |
|
||||
| *Branch* | The designated branch for PackageGit PRs | yes | string | | |
|
||||
| *GitProjectName* | Repository and branch where the ProjectGit lives. | no | string | **Format**: `org/project_repo#branch` | By default assumes `_ObsPrj` with default branch in the *Organization* |
|
||||
| *ManualMergeOnly* | Merges are permitted only upon receiving a "merge ok" comment from designated maintainers in the PkgGit PR. | no | bool | true, false | false |
|
||||
| *ManualMergeProject* | Merges are permitted only upon receiving a "merge ok" comment in the ProjectGit PR from project maintainers. | no | bool | true, false | false |
|
||||
| *ReviewRequired* | (NOT IMPLEMENTED) If submitter is a maintainer, require review from another maintainer if available. | no | bool | true, false | false |
|
||||
| *NoProjectGitPR* | Do not create PrjGit PR, but still perform other tasks. | no | bool | true, false | false |
|
||||
| *Reviewers* | PrjGit reviewers. Additional review requests are triggered for associated PkgGit PRs. PrjGit PR is merged only when all reviews are complete. | no | array of strings | | `[]` |
|
||||
| *ReviewGroups* | If a group is specified in Reviewers, its members are listed here. | no | array of objects | | `[]` |
|
||||
| *ReviewGroups > Name* | Name of the group | no | string | | |
|
||||
| *ReviewGroups > Reviewers* | Members of the group | no | array of strings | | |
|
||||
| *ReviewGroups > Silent* | Add members for notifications. If true, members are not explicitly requested to review. If one member approves, others are removed. | no | bool | true, false | false |
|
||||
NOTE: `-rm`, `-removed`, and `-deleted` are all "removed" suffixes used to indicate that the current branch is a placeholder for a previously existing package. Such branches are ignored by the bot; if one is the default branch, the package is removed and will not be added to the project.
|
||||
example:
|
||||
|
||||
[
|
||||
{
|
||||
"Workflows": ["pr", "direct"],
|
||||
"Organization": "autogits",
|
||||
"GitProjectName": "HiddenPrj",
|
||||
"Branch": "hidden",
|
||||
"Reviewers": []
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
Reviewers
|
||||
---------
|
||||
@@ -68,72 +57,39 @@ Reviews is a list of accounts that need to review package and/or project. They h
|
||||
|
||||
[~][*|-|+]username
|
||||
|
||||
A tilde (`~`) before a prefix signifies an advisory reviewer. Their input is requested, but their review status will not otherwise affect the process.
|
||||
General prefix of ~ indicates advisory reviewer. They will be requested, but ignored otherwise.
|
||||
|
||||
Other prefixes indicate project or package association of the reviewer:
|
||||
|
||||
* `*` indicates project *and* package
|
||||
* `-` indicates project-only reviewer
|
||||
* `+` indicates package-only reviewer
|
||||
|
||||
`+` is implied.
|
||||
`+` is implied. For example
|
||||
|
||||
For example: `[foo, -bar, ~*moo]` results in:
|
||||
* foo: package reviews
|
||||
* bar: project reviews
|
||||
* moo: package and project reviews, but ignored
|
||||
`[foo, -bar, ~*moo]`
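
As a sketch of how this can be expressed in a `workflow.config` entry (all usernames and the group name below are made up, and the other mandatory fields are omitted), prefixed reviewers and a review group might be declared as:

```
{
  "Reviewers": ["foo", "-bar", "~*moo", "security-team"],
  "ReviewGroups": [
    {
      "Name": "security-team",
      "Reviewers": ["alice", "bob"],
      "Silent": true
    }
  ]
}
```

Here `security-team` is resolved via *ReviewGroups*; because the group is marked *Silent*, its members are only notified rather than explicitly requested to review.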
|
||||
|
||||
Package Deletion Requests
|
||||
-------------------------
|
||||
(NOT YET IMPLEMENTED)
|
||||
|
||||
* **Removing a Package:**
|
||||
To remove a package from a project, submit a ProjectGit Pull Request (PR) that removes the corresponding submodule. The bot will then rename the project branch in the pool by appending "-removed" to its name.
|
||||
|
||||
* **Adding a Package Again:**
|
||||
If you wish to re-add a package, create a new PrjGit PR that re-adds the submodule on the branch carrying the "-removed" suffix. The bot will automatically remove this suffix from the project branch in the pool.
|
||||
|
||||
|
||||
Labels
|
||||
------
|
||||
|
||||
The following labels are used when defined in the Repo/Org.
|
||||
|
||||
| Label Config Entry | Default label | Description
|
||||
|--------------------|----------------|----------------------------------------
|
||||
| StagingAuto | staging/Auto | Assigned to Project Git PRs when first staged
|
||||
| ReviewPending | review/Pending | Assigned to Project Git PR when package reviews are still pending
|
||||
| ReviewDone | review/Done | Assigned to Project Git PR when reviews are complete on all package PRs
|
||||
results in
|
||||
* foo -> package reviews
|
||||
* bar -> project reviews
|
||||
* moo -> package and project reviews, but ignored
|
||||
|
||||
|
||||
Maintainership
|
||||
--------------
|
||||
|
||||
Filename: \_maintainership.json
|
||||
Location: ProjectGit
|
||||
Format: JSON
|
||||
Fields:
|
||||
Maintainership information is defined per project. For reviews, package maintainers are coalesced
with project maintainers. A review by any of the maintainers is acceptable.
|
||||
|
||||
| Key | Value | Notes |
|
||||
| ----- | ----- | ----- |
|
||||
| package name | array of strings representing the package maintainers | List of package maintainers |
|
||||
| “” (empty string) | array of strings representing the project maintainers | List of project maintainers |
|
||||
example:
|
||||
|
||||
Maintainership information is defined per project. For PackageGit PR reviews, package maintainers are combined with project maintainers. A review by any of these maintainers is acceptable.
|
||||
{
|
||||
"package1": [ "reviewer", "reviewer2"],
|
||||
"package2": [],
|
||||
|
||||
If the submitter is a maintainer, no review will be requested from them.
|
||||
// "project" maintainer
|
||||
"": ["reviewer3", "reviewer4"]
|
||||
}
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
{
|
||||
"package1": [ "reviewer", "reviewer2"],
|
||||
"package2": [],
|
||||
|
||||
// "project" maintainer
|
||||
"": ["reviewer3", "reviewer4"]
|
||||
}
|
||||
```
|
||||
|
||||
Permissions
|
||||
-----------
|
||||
@@ -153,11 +109,3 @@ the `workflow.config`.
|
||||
|
||||
NOTE: Project Maintainers have these permissions automatically.
|
||||
|
||||
Server configuration
|
||||
--------------------------
|
||||
|
||||
**Configuration file:**
|
||||
|
||||
| Field | Type | Notes |
|
||||
| ----- | ----- | ----- |
|
||||
| root | Array of string | Format **org/repo\#branch** |
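
A minimal sketch of such a file, assuming it follows the same JSON conventions as the other configuration files here (the repository and branch names are placeholders):

```
{
  "root": ["example-org/_ObsPrj#main"]
}
```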
|
||||
@@ -7,7 +7,7 @@ import "src.opensuse.org/autogits/common"
|
||||
|
||||
type StateChecker interface {
|
||||
VerifyProjectState(configs *common.AutogitConfig) ([]*PRToProcess, error)
|
||||
CheckRepos()
|
||||
CheckRepos() error
|
||||
ConsistencyCheckProcess() error
|
||||
}
|
||||
|
||||
|
||||
@@ -170,7 +170,7 @@ func main() {
|
||||
common.RequestType_PRSync: req,
|
||||
common.RequestType_PRReviewAccepted: req,
|
||||
common.RequestType_PRReviewRejected: req,
|
||||
common.RequestType_PRComment: req,
|
||||
common.RequestType_IssueComment: req,
|
||||
},
|
||||
}
|
||||
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: pr_processor.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -source=pr_processor.go -destination=mock/pr_processor.go -typed
|
||||
//
|
||||
|
||||
// Package mock_main is a generated GoMock package.
|
||||
package mock_main
|
||||
@@ -1,157 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: state_checker.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -source=state_checker.go -destination=../mock/state_checker.go -typed -package mock_main
|
||||
//
|
||||
|
||||
// Package mock_main is a generated GoMock package.
|
||||
package mock_main
|
||||
|
||||
import (
|
||||
reflect "reflect"
|
||||
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
common "src.opensuse.org/autogits/common"
|
||||
interfaces "src.opensuse.org/autogits/workflow-pr/interfaces"
|
||||
)
|
||||
|
||||
// MockStateChecker is a mock of StateChecker interface.
|
||||
type MockStateChecker struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockStateCheckerMockRecorder
|
||||
isgomock struct{}
|
||||
}
|
||||
|
||||
// MockStateCheckerMockRecorder is the mock recorder for MockStateChecker.
|
||||
type MockStateCheckerMockRecorder struct {
|
||||
mock *MockStateChecker
|
||||
}
|
||||
|
||||
// NewMockStateChecker creates a new mock instance.
|
||||
func NewMockStateChecker(ctrl *gomock.Controller) *MockStateChecker {
|
||||
mock := &MockStateChecker{ctrl: ctrl}
|
||||
mock.recorder = &MockStateCheckerMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockStateChecker) EXPECT() *MockStateCheckerMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// CheckRepos mocks base method.
|
||||
func (m *MockStateChecker) CheckRepos() error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "CheckRepos")
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// CheckRepos indicates an expected call of CheckRepos.
|
||||
func (mr *MockStateCheckerMockRecorder) CheckRepos() *MockStateCheckerCheckReposCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckRepos", reflect.TypeOf((*MockStateChecker)(nil).CheckRepos))
|
||||
return &MockStateCheckerCheckReposCall{Call: call}
|
||||
}
|
||||
|
||||
// MockStateCheckerCheckReposCall wrap *gomock.Call
|
||||
type MockStateCheckerCheckReposCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockStateCheckerCheckReposCall) Return(arg0 error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockStateCheckerCheckReposCall) Do(f func() error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockStateCheckerCheckReposCall) DoAndReturn(f func() error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// ConsistencyCheckProcess mocks base method.
|
||||
func (m *MockStateChecker) ConsistencyCheckProcess() error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ConsistencyCheckProcess")
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// ConsistencyCheckProcess indicates an expected call of ConsistencyCheckProcess.
|
||||
func (mr *MockStateCheckerMockRecorder) ConsistencyCheckProcess() *MockStateCheckerConsistencyCheckProcessCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConsistencyCheckProcess", reflect.TypeOf((*MockStateChecker)(nil).ConsistencyCheckProcess))
|
||||
return &MockStateCheckerConsistencyCheckProcessCall{Call: call}
|
||||
}
|
||||
|
||||
// MockStateCheckerConsistencyCheckProcessCall wrap *gomock.Call
|
||||
type MockStateCheckerConsistencyCheckProcessCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockStateCheckerConsistencyCheckProcessCall) Return(arg0 error) *MockStateCheckerConsistencyCheckProcessCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockStateCheckerConsistencyCheckProcessCall) Do(f func() error) *MockStateCheckerConsistencyCheckProcessCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockStateCheckerConsistencyCheckProcessCall) DoAndReturn(f func() error) *MockStateCheckerConsistencyCheckProcessCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// VerifyProjectState mocks base method.
|
||||
func (m *MockStateChecker) VerifyProjectState(configs *common.AutogitConfig) ([]*interfaces.PRToProcess, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "VerifyProjectState", configs)
|
||||
ret0, _ := ret[0].([]*interfaces.PRToProcess)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// VerifyProjectState indicates an expected call of VerifyProjectState.
|
||||
func (mr *MockStateCheckerMockRecorder) VerifyProjectState(configs any) *MockStateCheckerVerifyProjectStateCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VerifyProjectState", reflect.TypeOf((*MockStateChecker)(nil).VerifyProjectState), configs)
|
||||
return &MockStateCheckerVerifyProjectStateCall{Call: call}
|
||||
}
|
||||
|
||||
// MockStateCheckerVerifyProjectStateCall wrap *gomock.Call
|
||||
type MockStateCheckerVerifyProjectStateCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Return(arg0 []*interfaces.PRToProcess, arg1 error) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.Return(arg0, arg1)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Do(f func(*common.AutogitConfig) ([]*interfaces.PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) DoAndReturn(f func(*common.AutogitConfig) ([]*interfaces.PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
@@ -41,9 +41,6 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
|
||||
refs = append(refs, ref)
|
||||
}
|
||||
|
||||
slices.Sort(title_refs)
|
||||
slices.Sort(refs)
|
||||
|
||||
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
|
||||
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, "\n") + "\n"
|
||||
|
||||
@@ -230,18 +227,12 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
|
||||
}
|
||||
|
||||
title, desc := PrjGitDescription(prset)
|
||||
pr, err, isNew := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
|
||||
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
|
||||
if err != nil {
|
||||
common.LogError("Error creating PrjGit PR:", err)
|
||||
return err
|
||||
}
|
||||
org := PrjGit.Owner.UserName
|
||||
repo := PrjGit.Name
|
||||
idx := pr.Index
|
||||
if isNew {
|
||||
Gitea.SetLabels(org, repo, idx, []string{prset.Config.Label(common.Label_StagingAuto)})
|
||||
}
|
||||
Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
|
||||
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, pr.Index, &models.EditPullRequestOption{
|
||||
RemoveDeadline: true,
|
||||
})
|
||||
|
||||
@@ -278,7 +269,6 @@ func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch
|
||||
}
|
||||
|
||||
var updatePrjGitError_requeue error = errors.New("Commits do not match. Requeing after 5 seconds.")
|
||||
|
||||
func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
|
||||
_, _, PrjGitBranch := prset.Config.GetPrjGit()
|
||||
PrjGitPR, err := prset.GetPrjGitPR()
|
||||
@@ -355,20 +345,7 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
    }

    // update PR
    isPrTitleSame := func(CurrentTitle, NewTitle string) bool {
        ctlen := len(CurrentTitle)
        for _, suffix := range []string{"...", "…"} {
            slen := len(suffix)
            if ctlen > 250 && strings.HasSuffix(CurrentTitle, suffix) && len(NewTitle) > ctlen {
                NewTitle = NewTitle[0:ctlen-slen] + suffix
                if CurrentTitle == NewTitle {
                    return true
                }
            }
        }
        return CurrentTitle == NewTitle
    }
    if PrjGitPR.PR.User.UserName == CurrentUser.UserName && (PrjGitPR.PR.Body != PrjGitBody || !isPrTitleSame(PrjGitPR.PR.Title, PrjGitTitle)) {
    if PrjGitPR.PR.Body != PrjGitBody || PrjGitPR.PR.Title != PrjGitTitle {
        Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
            RemoveDeadline: true,
            Title: PrjGitTitle,
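The isPrTitleSame closure in this hunk compensates for the forge storing long PR titles in truncated form: without it, a stored title ending in "..." would never compare equal to the regenerated full-length title, and the bot would rewrite the PR on every pass. A worked example, under the assumption that titles get cut to 255 characters (the exact limit is not part of this diff):

    current := strings.Repeat("x", 252) + "..." // 255 chars, as stored after truncation
    proposed := strings.Repeat("x", 400)        // freshly regenerated, untruncated title
    // ctlen = 255 > 250, current ends in "...", and proposed is longer than current,
    // so proposed is trimmed to 252 chars and "..." is re-appended; the strings now match,
    // isPrTitleSame(current, proposed) returns true, and a needless UpdatePullRequest is skipped.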
@@ -404,10 +381,6 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
    prjGitPRbranch := prGitBranchNameForPR(prRepo, prNo)
    prjGitPR, err := prset.GetPrjGitPR()
    if err == common.PRSet_PrjGitMissing {
        if req.State != "open" {
            common.LogDebug("This PR is closed and no ProjectGit PR. Ignoring.")
            return nil
        }
        common.LogDebug("Missing PrjGit. Need to create one under branch", prjGitPRbranch)

        if err = pr.CreatePRjGitPR(prjGitPRbranch, prset); err != nil {
@@ -554,14 +527,6 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
        return err
    }

    // update prset if we should build it or not
    if prjGitPR != nil {
        if file, err := git.GitCatFile(common.DefaultGitPrj, prjGitPR.PR.Head.Sha, "staging.config"); err == nil {
            prset.HasAutoStaging = (file != nil)
            common.LogDebug(" -> automatic staging enabled?:", prset.HasAutoStaging)
        }
    }

    // handle case where PrjGit PR is only one left and there are no changes, then we can just close the PR
    if len(prset.PRs) == 1 && prjGitPR != nil && prset.PRs[0] == prjGitPR && prjGitPR.PR.User.UserName == prset.BotUser {
        common.LogDebug(" --> checking if superflous PR")
@@ -607,7 +572,7 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {

type RequestProcessor struct {
    configuredRepos map[string][]*common.AutogitConfig
    recursive int
    recursive int
}

func ProcesPullRequest(pr *models.PullRequest, configs []*common.AutogitConfig) error {

@@ -648,7 +613,7 @@ func (w *RequestProcessor) ProcessFunc(request *common.Request) (err error) {
            common.LogError("Cannot find PR for issue:", req.Pull_Request.Base.Repo.Owner.Username, req.Pull_Request.Base.Repo.Name, req.Pull_Request.Number)
            return err
        }
    } else if req, ok := request.Data.(*common.IssueCommentWebhookEvent); ok {
    } else if req, ok := request.Data.(*common.IssueWebhookEvent); ok {
        pr, err = Gitea.GetPullRequest(req.Repository.Owner.Username, req.Repository.Name, int64(req.Issue.Number))
        if err != nil {
            common.LogError("Cannot find PR for issue:", req.Repository.Owner.Username, req.Repository.Name, int64(req.Issue.Number))
@@ -103,7 +103,7 @@ func TestOpenPR(t *testing.T) {
    }
    // gitea.EXPECT().GetAssociatedPrjGitPR("test", "prjcopy", "test", "testRepo", int64(1)).Return(nil, nil)
    gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil, true)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
    gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
    gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, nil)
    gitea.EXPECT().GetPullRequestReviews("test", "testRepo", int64(0)).Return([]*models.PullReview{}, nil)

@@ -153,7 +153,7 @@ func TestOpenPR(t *testing.T) {
    }
    failedErr := errors.New("Returned error here")
    gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr, false)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr)

    err := pr.Process(event)
    if err != failedErr {

@@ -193,7 +193,7 @@ func TestOpenPR(t *testing.T) {
    gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
    gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
    gitea.EXPECT().GetPullRequestReviews("org", "SomeRepo", int64(13)).Return([]*models.PullReview{}, nil)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil, true)
    gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
    gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, failedErr)

    gitea.EXPECT().FetchMaintainershipDirFile("test", "prjcopy", "branch", "_project").Return(nil, "", repository.NewRepoGetRawFileNotFound())
@@ -1,6 +1,7 @@
package main

import (
    "errors"
    "fmt"
    "math/rand"
    "path"

@@ -42,15 +43,6 @@ func pullRequestToEventState(state models.StateType) string {
}

func (s *DefaultStateChecker) ProcessPR(pr *models.PullRequest, config *common.AutogitConfig) error {
    defer func() {
        if r := recover(); r != nil {
            common.LogError("panic caught in ProcessPR", common.PRtoString(pr))
            if err, ok := r.(error); !ok {
                common.LogError(err)
            }
            common.LogError(string(debug.Stack()))
        }
    }()
    return ProcesPullRequest(pr, common.AutogitConfigs{config})
}
@@ -159,7 +151,7 @@ func (s *DefaultStateChecker) VerifyProjectState(config *common.AutogitConfig) (
    return PrjGitSubmoduleCheck(config, git, prjGitRepo, submodules)
}

func (s *DefaultStateChecker) CheckRepos() {
func (s *DefaultStateChecker) CheckRepos() error {
    defer func() {
        if r := recover(); r != nil {
            common.LogError("panic caught")

@@ -169,6 +161,7 @@ func (s *DefaultStateChecker) CheckRepos() {
            common.LogError(string(debug.Stack()))
        }
    }()
    errorList := make([]error, 0, 10)

    for org, configs := range s.processor.configuredRepos {
        for _, config := range configs {

@@ -182,12 +175,12 @@ func (s *DefaultStateChecker) CheckRepos() {
            prs, err := s.i.VerifyProjectState(config)
            if err != nil {
                common.LogError(" *** verification failed, org:", org, err)
                errorList = append(errorList, err)
            }
            for _, pr := range prs {
                prs, err := Gitea.GetRecentPullRequests(pr.Org, pr.Repo, pr.Branch)
                if err != nil {
                    common.LogError("Error fetching pull requests for", fmt.Sprintf("%s/%s#%s", pr.Org, pr.Repo, pr.Branch), err)
                    break
                    return fmt.Errorf("Error fetching pull requests for %s/%s#%s. Err: %w", pr.Org, pr.Repo, pr.Branch, err)
                }
                if len(prs) > 0 {
                    common.LogDebug(fmt.Sprintf("%s/%s#%s", pr.Org, pr.Repo, pr.Branch), " - # of PRs to check:", len(prs))

@@ -200,11 +193,9 @@ func (s *DefaultStateChecker) CheckRepos() {

            common.LogInfo(" ++ verification complete, org:", org, "config:", config.GitProjectName)
        }

        if len(configs) == 0 {
            common.LogError(" org:", org, "has 0 configs?")
        }
    }

    return errors.Join(errorList...)
}

func (s *DefaultStateChecker) ConsistencyCheckProcess() error {
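These hunks give CheckRepos an error return: failures from VerifyProjectState are accumulated in errorList and handed back via errors.Join rather than only logged. A minimal caller sketch (the checker variable and the surrounding scheduler loop are assumptions, not shown here):

    if err := checker.CheckRepos(); err != nil {
        // errors.Join keeps every per-config failure; log them and let the next periodic run retry
        common.LogError("consistency check finished with errors:", err)
    }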