forked from adamm/autogits
Compare commits
174 Commits
0.0.1 ... build-dire
.gitattributes (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*.changes merge=merge-changes
.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
mock
node_modules
*.obscpio
autogits-tmp.tar.zst
*.osc
README.md (12 lines changed)
@@ -3,20 +3,18 @@ AutoGits

The bots that drive the Git workflow for package management

* devel-importer -- helper to import an OBS devel project into a Gitea organization
* gitea-events-rabbitmq-publisher -- takes all events from a Gitea organization (webhook) and publishes them on a RabbitMQ instance
* maintainer-and-policy-bot -- review bot that makes sure the maintainer, along with other necessary entities, signed off on reviews
* obs-staging-bot -- build bot for a PR
* obs-status-service -- reports the build status of an OBS project as an SVG
* pr-review -- keeps a PR to _ObsPrj consistent with a PR to a package update
* prjgit-updater -- updates _ObsPrj based on direct pushes and repo creations/removals in the organization
* workflow-pr -- keeps a PR to _ObsPrj consistent with a PR to a package update
* workflow-direct -- updates _ObsPrj based on direct pushes and repo creations/removals in the organization
* staging-utils -- review tooling for PRs
  - list PRs
  - merge PRs
  - split PRs
  - diff PRs
  - accept/reject PRs
* random -- random utils and tools

Bugs
----
_service (new file, 16 lines)
@@ -0,0 +1,16 @@
<services>
  <!-- workaround, go_modules needs a tar and obs_scm doesn't take file://. -->
  <service name="roast" mode="manual">
    <param name="target">.</param>
    <param name="reproducible">true</param>
    <param name="outfile">autogits-tmp.tar.zst</param>
    <param name="exclude">autogits-tmp.tar.zst</param>
  </service>
  <service name="go_modules" mode="manual">
    <param name="basename">./</param>
    <param name="compression">zst</param>
    <param name="subdir">gitea-events-rabbitmq-publisher</param>
    <param name="vendorname">vendor-gitea-events-rabbitmq-publisher</param>
  </service>
</services>
autogits.changes (new file, 10 lines)
@@ -0,0 +1,10 @@
-------------------------------------------------------------------
Wed Sep 11 16:00:58 UTC 2024 - Adam Majer <adam.majer@suse.de>

- enable Authorization bearer token checks

-------------------------------------------------------------------
Wed Sep 11 14:10:18 UTC 2024 - Adam Majer <adam.majer@suse.de>

- rabbitmq publisher
@@ -17,13 +17,12 @@

Name: autogits
Version: 0.0.1
Version: 0
Release: 0
Summary: GitWorkflow utilities
License: GPL-2.0-or-later
URL: https://src.opensuse.org/adamm/autogits/
Source: https://src.opensuse.org/adamm/autogits/0.0.1.tar.gz
Source1: vendor.tar.zst
URL: https://src.opensuse.org/adamm/autogits
Source1: vendor-gitea-events-rabbitmq-publisher.tar.zst
BuildRequires: golang-packaging
BuildRequires: systemd-rpm-macros
BuildRequires: zstd

@@ -42,7 +41,8 @@ with a topic
<scope>.src.$organization.$webhook_type.[$webhook_action_type]

%prep
%autosetup -p1
cp -r /home/abuild/rpmbuild/SOURCES/* ./
cd gitea-events-rabbitmq-publisher && tar x --zstd -f %{SOURCE1}

%build
go build \

@@ -51,8 +51,20 @@ go build \
	-buildmode=pie

%install
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}
install -D -m0755 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service

%pre -n gitea-events-rabbitmq-publisher
%service_add_pre gitea-events-rabbitmq-publisher.service

%post -n gitea-events-rabbitmq-publisher
%service_add_post gitea-events-rabbitmq-publisher.service

%preun -n gitea-events-rabbitmq-publisher
%service_del_preun gitea-events-rabbitmq-publisher.service

%postun -n gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service

%files -n gitea-events-rabbitmq-publisher
%license COPYING
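The %description hunk above documents the routing-key layout that gitea-events-rabbitmq-publisher uses: `<scope>.src.$organization.$webhook_type.[$webhook_action_type]`. As a rough illustration, a consumer could bind to that topic space as sketched below. This is only a sketch assuming the rabbitmq/amqp091-go client; the broker URL, exchange name ("pubsub") and organization are placeholders, not taken from this repository.

```go
package main

import (
	"log"

	amqp "github.com/rabbitmq/amqp091-go"
)

func main() {
	// Connection URL, exchange and routing key are illustrative only.
	conn, err := amqp.Dial("amqps://user:password@rabbit.example.org/")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	ch, err := conn.Channel()
	if err != nil {
		log.Fatal(err)
	}

	// Server-named, exclusive, auto-deleted queue for this consumer.
	q, err := ch.QueueDeclare("", false, true, true, false, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Bind to all pull_request events of one organization, matching the
	// <scope>.src.$organization.$webhook_type.[$webhook_action_type] layout.
	if err := ch.QueueBind(q.Name, "*.src.myorg.pull_request.#", "pubsub", false, nil); err != nil {
		log.Fatal(err)
	}

	msgs, err := ch.Consume(q.Name, "", true, false, false, false, nil)
	if err != nil {
		log.Fatal(err)
	}
	for m := range msgs {
		log.Printf("topic=%s body=%d bytes", m.RoutingKey, len(m.Body))
	}
}
```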
@@ -7,7 +7,8 @@ gitea-generated/client/gitea_api_client.go:: api.json
	[ -d gitea-generated ] || mkdir gitea-generated
	podman run --rm -v $$(pwd):/api ghcr.io/go-swagger/go-swagger generate client -f /api/api.json -t /api/gitea-generated

api: gitea-generated/client/gitea_api_client.go
api: gitea-generated/client/gitea_api_client.go mock_gitea_utils.go
	go generate

build: api
	go build
bots-common/associated_pr_scanner.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package common

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"regexp"
	"slices"
	"strconv"
	"strings"
)

const PrPattern = "PR: %s/%s#%d"

type BasicPR struct {
	Org, Repo string
	Num       int64
}

var validOrgAndRepoRx *regexp.Regexp = regexp.MustCompile("^[A-Za-z0-9_-]+$")

func parsePrLine(line string) (BasicPR, error) {
	var ret BasicPR
	trimmedLine := strings.TrimSpace(line)

	// min size > 9 -> must fit all parameters in the PrPattern with at least one item per parameter
	if len(trimmedLine) < 9 || trimmedLine[0:4] != "PR: " {
		return ret, errors.New("Line too short")
	}

	trimmedLine = trimmedLine[4:]
	org := strings.SplitN(trimmedLine, "/", 2)
	ret.Org = org[0]
	if len(org) != 2 {
		return ret, errors.New("missing / separator")
	}

	repo := strings.SplitN(org[1], "#", 2)
	ret.Repo = repo[0]
	if len(repo) != 2 {
		return ret, errors.New("Missing # separator")
	}

	// Gitea requires that each org and repo be [A-Za-z0-9_-]+
	var err error
	if ret.Num, err = strconv.ParseInt(repo[1], 10, 64); err != nil {
		return ret, errors.New("Invalid number")
	}

	if !validOrgAndRepoRx.MatchString(repo[0]) || !validOrgAndRepoRx.MatchString(org[0]) {
		return ret, errors.New("Invalid repo or org character set")
	}

	return ret, nil
}

func ExtractDescriptionAndPRs(data *bufio.Scanner) (string, []BasicPR) {
	prs := make([]BasicPR, 0, 1)
	var desc strings.Builder

	for data.Scan() {
		line := data.Text()

		pr, err := parsePrLine(line)
		if err != nil {
			desc.WriteString(line)
			desc.WriteByte('\n')
		} else {
			prs = append(prs, pr)
		}
	}

	return strings.TrimSpace(desc.String()), prs
}

func prToLine(writer io.Writer, pr BasicPR) {
	writer.Write([]byte("\n"))
	fmt.Fprintf(writer, PrPattern, pr.Org, pr.Repo, pr.Num)
}

// returns:
//   <0 for a<b
//   >0 for a>b
//   =0 when equal
func compareBasicPRs(a BasicPR, b BasicPR) int {
	if c := strings.Compare(a.Org, b.Org); c != 0 {
		return c
	}
	if c := strings.Compare(a.Repo, b.Repo); c != 0 {
		return c
	}

	if a.Num > b.Num {
		return 1
	}
	if a.Num < b.Num {
		return -1
	}

	return 0
}

func AppendPRsToDescription(desc string, prs []BasicPR) string {
	var out strings.Builder

	out.WriteString(strings.TrimSpace(desc))
	out.WriteString("\n")

	slices.SortFunc(prs, compareBasicPRs)
	prs = slices.Compact(prs)

	for _, pr := range prs {
		prToLine(&out, pr)
	}

	return out.String()
}
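A quick usage sketch of the helpers above: split a pull-request description from its "PR: org/repo#num" footer, then write the footer back sorted and de-duplicated. The import path matches the tests below; the description text and PR references are made up.

```go
package main

import (
	"bufio"
	"fmt"
	"strings"

	"src.opensuse.org/autogits/common"
)

func main() {
	body := "Update libfoo to 1.2\n\nPR: pool/libfoo#42\n"

	// Separate the free-form description from the "PR: org/repo#num" footer lines.
	desc, prs := common.ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(body)))

	// Add one more associated PR and re-emit the footer, sorted with duplicates removed.
	prs = append(prs, common.BasicPR{Org: "pool", Repo: "libbar", Num: 7})
	fmt.Println(common.AppendPRsToDescription(desc, prs))
}
```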
bots-common/associated_pr_scanner_test.go (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func newStringScanner(s string) *bufio.Scanner {
|
||||
return bufio.NewScanner(strings.NewReader(s))
|
||||
}
|
||||
|
||||
func TestAssociatedPRScanner(t *testing.T) {
|
||||
testTable := []struct {
|
||||
name string
|
||||
input string
|
||||
prs []common.BasicPR
|
||||
desc string
|
||||
}{
|
||||
{
|
||||
"No PRs",
|
||||
"",
|
||||
[]common.BasicPR{},
|
||||
"",
|
||||
},
|
||||
{
|
||||
"Single PRs",
|
||||
"Some header of the issue\n\nFollowed by some description\n\nPR: test/foo#4\n",
|
||||
[]common.BasicPR{{Org: "test", Repo: "foo", Num: 4}},
|
||||
"Some header of the issue\n\nFollowed by some description",
|
||||
},
|
||||
{
|
||||
"Multiple PRs",
|
||||
"Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo#5\n",
|
||||
[]common.BasicPR{
|
||||
{Org: "test", Repo: "foo", Num: 4},
|
||||
{Org: "test", Repo: "goo", Num: 5},
|
||||
},
|
||||
"Some header of the issue\n\nFollowed by some description",
|
||||
},
|
||||
{
|
||||
"Multiple PRs with whitespace",
|
||||
"Some header of the issue\n\n\tPR: test/goo#5\n\n Followed by some description\n \t PR: test/foo#4\n",
|
||||
[]common.BasicPR{
|
||||
{Org: "test", Repo: "foo", Num: 4},
|
||||
{Org: "test", Repo: "goo", Num: 5},
|
||||
},
|
||||
"Some header of the issue\n\n\n Followed by some description",
|
||||
},
|
||||
{
|
||||
"Multiple PRs with missing names and other special cases to ignore",
|
||||
"Some header of the issue\n\n\n\t PR: foobar#5 \n\t PR: rd/goo5 \n\t PR: test/#5 \n" +
|
||||
"\t PR: /goo#5 \n\t PR: test/goo# \n\t PR: test / goo # 10 \n\tPR: test/gool# 10 \n" +
|
||||
"\t PR: test/goo#5 \n\t\n Followed by some description\n\t PR: test/foo#4 \n\t\n\n",
|
||||
[]common.BasicPR{
|
||||
{
|
||||
Org: "test",
|
||||
Repo: "foo",
|
||||
Num: 4,
|
||||
},
|
||||
{
|
||||
Org: "test",
|
||||
Repo: "goo",
|
||||
Num: 5,
|
||||
},
|
||||
},
|
||||
"Some header of the issue\n\n\n\t PR: foobar#5 \n\t PR: rd/goo5 \n\t PR: test/#5 \n" +
|
||||
"\t PR: /goo#5 \n\t PR: test/goo# \n\t PR: test / goo # 10 \n\tPR: test/gool# 10 \n" +
|
||||
"\t\n Followed by some description",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testTable {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
desc, prs := common.ExtractDescriptionAndPRs(newStringScanner(test.input))
|
||||
if len(prs) != len(test.prs) {
|
||||
t.Error("Unexpected length:", len(prs), "expected:", len(test.prs))
|
||||
return
|
||||
}
|
||||
|
||||
for _, p := range test.prs {
|
||||
if !slices.Contains(prs, p) {
|
||||
t.Error("missing expected PR", p)
|
||||
}
|
||||
}
|
||||
|
||||
if desc != test.desc {
|
||||
t.Error("Desc output", len(desc), "!=", len(test.desc), ":", desc)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppendingPRsToDescription(t *testing.T) {
|
||||
testTable := []struct {
|
||||
name string
|
||||
desc string
|
||||
PRs []common.BasicPR
|
||||
output string
|
||||
}{
|
||||
{
|
||||
"Append single PR to end of description",
|
||||
"something",
|
||||
[]common.BasicPR{
|
||||
{Org: "a", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a/b#100",
|
||||
},
|
||||
{
|
||||
"Append multiple PR to end of description",
|
||||
"something",
|
||||
[]common.BasicPR{
|
||||
{Org: "a1", Repo: "b", Num: 100},
|
||||
{Org: "a1", Repo: "c", Num: 100},
|
||||
{Org: "a1", Repo: "c", Num: 101},
|
||||
{Org: "b", Repo: "b", Num: 100},
|
||||
{Org: "c", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
|
||||
},
|
||||
{
|
||||
"Append multiple sorted PR to end of description and remove dups",
|
||||
"something",
|
||||
[]common.BasicPR{
|
||||
{Org: "a1", Repo: "c", Num: 101},
|
||||
{Org: "a1", Repo: "c", Num: 100},
|
||||
{Org: "c", Repo: "b", Num: 100},
|
||||
{Org: "b", Repo: "b", Num: 100},
|
||||
{Org: "a1", Repo: "c", Num: 101},
|
||||
{Org: "a1", Repo: "c", Num: 101},
|
||||
{Org: "a1", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testTable {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
d := common.AppendPRsToDescription(test.desc, test.PRs)
|
||||
if d != test.output {
|
||||
t.Error(len(d), "vs", len(test.output))
|
||||
t.Error("unpected output", d)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -22,53 +22,109 @@ import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"os"
	"slices"
	"strings"
)

type ConfigFile struct {
	GitProjectName []string
}

type AutogitConfig struct {
	Workflows      []string // [pr, direct, test]
	Organization   string
	GitProjectName string   // Organization/GitProjectName.git is PrjGit
	Branch         string   // branch name of PkgGit that aligns with PrjGit submodules
	GitProjectName string   // Organization/GitProjectName.git is PrjGit
	Branch         string   // branch name of PkgGit that aligns with PrjGit submodules
	Reviewers      []string // only used by `pr` workflow
}

func ReadWorkflowConfigs(reader io.Reader) ([]*AutogitConfig, error) {
type AutogitConfigs []*AutogitConfig

func ReadConfig(reader io.Reader) (*ConfigFile, error) {
	data, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("Error reading config file. err: %w", err)
		return nil, fmt.Errorf("Error reading config data: %w", err)
	}

	var config []*AutogitConfig
	if err = json.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("Error parsing config file. err: %w", err)
	config := ConfigFile{}
	if err := json.Unmarshal(data, &config.GitProjectName); err != nil {
		return nil, fmt.Errorf("Error parsing Git Project paths: %w", err)
	}

	availableWorkflows := []string{"pr", "direct", "test"}
	for _, workflow := range config {
		for _, w := range workflow.Workflows {
			if !slices.Contains(availableWorkflows, w) {
				return nil, fmt.Errorf(
					"Invalid Workflow '%s'. Only available workflows are: %s",
					w, strings.Join(availableWorkflows, " "),
				)
			}
		}
		if len(workflow.GitProjectName) == 0 {
			workflow.GitProjectName = DefaultGitPrj
		}
	}

	return config, nil
	return &config, nil
}

func ReadWorkflowConfigsFile(filename string) ([]*AutogitConfig, error) {
func ReadConfigFile(filename string) (*ConfigFile, error) {
	file, err := os.Open(filename)
	if err != nil {
		return nil, fmt.Errorf("Cannot open config file for reading. err: %w", err)
	}
	defer file.Close()

	return ReadWorkflowConfigs(file)
	return ReadConfig(file)
}

func ReadWorkflowConfig(gitea Gitea, git_project string) (*AutogitConfig, error) {
	hash := strings.Split(git_project, "#")
	branch := ""
	if len(hash) > 1 {
		branch = hash[1]
	}

	a := strings.Split(hash[0], "/")
	prjGitRepo := DefaultGitPrj
	switch len(a) {
	case 1:
	case 2:
		prjGitRepo = a[1]
	default:
		return nil, fmt.Errorf("Missing org/repo in projectgit: %s", git_project)
	}

	data, _, err := gitea.GetRepositoryFileContent(a[0], prjGitRepo, branch, "workflow.config")
	if err != nil {
		return nil, fmt.Errorf("Error fetching 'workflow.config': %w", err)
	}

	var config AutogitConfig
	if err := json.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("Error parsing config file: %w", err)
	}

	config.GitProjectName = a[0] + "/" + prjGitRepo
	if len(branch) > 0 {
		config.GitProjectName = config.GitProjectName + "#" + branch
	}
	if len(config.Organization) < 1 {
		config.Organization = a[0]
	}
	log.Println(config)
	return &config, nil
}

func ResolveWorkflowConfigs(gitea Gitea, config *ConfigFile) (AutogitConfigs, error) {
	configs := make([]*AutogitConfig, 0, len(config.GitProjectName))
	for _, git_project := range config.GitProjectName {
		c, err := ReadWorkflowConfig(gitea, git_project)
		if err != nil {
			// can't sync, so ignore for now
			log.Println(err)
		} else {
			configs = append(configs, c)
		}
	}

	return configs, nil
}

func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *AutogitConfig {
	for _, c := range configs {
		if c.Organization == org && c.Branch == branch {
			return c
		}
	}

	return nil
}
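Two JSON documents are involved here: the bot's own configuration, which ReadConfig above now reads as a plain array of ProjectGit paths, and the workflow.config that ReadWorkflowConfig fetches from each ProjectGit through the Gitea interface. The sketch below shows both; the field names come from the structs above, while every value is purely illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"src.opensuse.org/autogits/common"
)

func main() {
	// Bot configuration: a JSON array of "org[/prjgit][#branch]" entries.
	cfg, err := common.ReadConfig(strings.NewReader(`["products/_ObsPrj#main", "devel-tools"]`))
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.GitProjectName)

	// workflow.config as stored in the ProjectGit; parsed into AutogitConfig.
	var wf common.AutogitConfig
	workflowConfig := `{
	  "Workflows": ["pr", "direct"],
	  "Organization": "products",
	  "Branch": "main",
	  "Reviewers": ["reviewer-bot"]
	}`
	if err := json.Unmarshal([]byte(workflowConfig), &wf); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", wf)
}
```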
@@ -24,6 +24,7 @@ const (
	ObsPasswordEnv = "OBS_PASSWORD"

	DefaultGitPrj      = "_ObsPrj"
	PrjLinksFile       = "links.json"
	GiteaRequestHeader = "X-Gitea-Event-Type"

	Bot_BuildReview = "autogits_obs_staging_bot"
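GiteaRequestHeader names the header Gitea attaches to outgoing webhook deliveries. A minimal sketch of reading it in a webhook handler follows; the route and port are illustrative and the handler does no payload validation.

```go
package main

import (
	"log"
	"net/http"

	"src.opensuse.org/autogits/common"
)

func main() {
	http.HandleFunc("/webhook", func(w http.ResponseWriter, r *http.Request) {
		// Gitea identifies the event type, e.g. "push" or "pull_request",
		// in the X-Gitea-Event-Type request header.
		eventType := r.Header.Get(common.GiteaRequestHeader)
		log.Println("received event:", eventType)
		w.WriteHeader(http.StatusOK)
	})
	log.Fatal(http.ListenAndServe(":8000", nil))
}
```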
@@ -1,4 +1,4 @@
package common
package common_test

/*
 * This file is part of Autogits.
@@ -19,18 +19,48 @@ package common
|
||||
*/
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type GitHandler struct {
|
||||
//go:generate mockgen -source=git_utils.go -destination=mock/git_utils.go -typed
|
||||
|
||||
type GitSubmoduleLister interface {
|
||||
GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error)
|
||||
GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool)
|
||||
}
|
||||
|
||||
type GitStatusLister interface {
|
||||
GitStatus(cwd string) ([]GitStatusData, error)
|
||||
}
|
||||
|
||||
type Git interface {
|
||||
GitParseCommits(cwd string, commitIDs []string) (parsedCommits []GitCommit, err error)
|
||||
GitCatFile(cwd, commitId, filename string) (data []byte, err error)
|
||||
GetPath() string
|
||||
|
||||
GitBranchHead(gitDir, branchName string) (string, error)
|
||||
io.Closer
|
||||
|
||||
GitSubmoduleLister
|
||||
GitStatusLister
|
||||
|
||||
GitExecWithOutputOrPanic(cwd string, params ...string) string
|
||||
GitExecOrPanic(cwd string, params ...string)
|
||||
GitExec(cwd string, params ...string) error
|
||||
GitExecWithOutput(cwd string, params ...string) (string, error)
|
||||
}
|
||||
|
||||
type GitHandlerImpl struct {
|
||||
DebugLogger bool
|
||||
|
||||
GitPath string
|
||||
@@ -38,20 +68,36 @@ type GitHandler struct {
|
||||
GitEmail string
|
||||
}
|
||||
|
||||
func CreateGitHandler(git_author, email, name string) (*GitHandler, error) {
|
||||
var err error
|
||||
func (s *GitHandlerImpl) GetPath() string {
|
||||
return s.GitPath
|
||||
}
|
||||
|
||||
git := new(GitHandler)
|
||||
git.GitCommiter = git_author
|
||||
git.GitPath, err = os.MkdirTemp("", name)
|
||||
type GitHandlerGenerator interface {
|
||||
CreateGitHandler(git_author, email, prjName string) (Git, error)
|
||||
ReadExistingPath(git_author, email, gitPath string) (Git, error)
|
||||
}
|
||||
|
||||
type GitHandlerGeneratorImpl struct{}
|
||||
|
||||
func (s *GitHandlerGeneratorImpl) CreateGitHandler(git_author, email, prj_name string) (Git, error) {
|
||||
gitPath, err := os.MkdirTemp("", prj_name)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create temp dir: %w", err)
|
||||
}
|
||||
|
||||
if err = os.Chmod(git.GitPath, 0700); err != nil {
|
||||
if err = os.Chmod(gitPath, 0700); err != nil {
|
||||
return nil, fmt.Errorf("Cannot fix permissions of temp dir: %w", err)
|
||||
}
|
||||
|
||||
return s.ReadExistingPath(git_author, email, gitPath)
|
||||
}
|
||||
|
||||
func (*GitHandlerGeneratorImpl) ReadExistingPath(git_author, email, gitPath string) (Git, error) {
|
||||
git := &GitHandlerImpl{
|
||||
GitCommiter: git_author,
|
||||
GitPath: gitPath,
|
||||
}
|
||||
|
||||
return git, nil
|
||||
}
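GitHandlerGeneratorImpl above hands out Git handlers rooted in a private temporary directory. A sketch of the typical life cycle — create, work relative to GetPath(), Close() to clean up. The author, e-mail and clone target below are only examples.

```go
package main

import (
	"fmt"
	"log"

	"src.opensuse.org/autogits/common"
)

func main() {
	var gen common.GitHandlerGenerator = &common.GitHandlerGeneratorImpl{}

	// Creates a fresh 0700 temp dir and returns a handler rooted there.
	git, err := gen.CreateGitHandler("Autogits Bot", "bot@example.org", "demo")
	if err != nil {
		log.Fatal(err)
	}
	defer git.Close() // removes the temp dir again

	// All Git* calls run relative to the handler's GetPath() root.
	git.GitExecOrPanic("", "clone", "--depth", "1", "https://src.opensuse.org/adamm/autogits.git", "repo")
	head, err := git.GitBranchHead("repo", "HEAD")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("checkout at", git.GetPath(), "head:", head)
}
```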
|
||||
|
||||
@@ -93,97 +139,16 @@ func (refs *GitReferences) addReference(id, branch string) {
|
||||
refs.refs = append(refs.refs, GitReference{Branch: branch, Id: id})
|
||||
}
|
||||
|
||||
func processRefs(gitDir string) ([]GitReference, error) {
|
||||
packedRefsPath := path.Join(gitDir, "packed-refs")
|
||||
stat, err := os.Stat(packedRefsPath)
|
||||
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
|
||||
id, err := e.GitExecWithOutput(gitDir, "rev-list", "-1", branchName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return "", fmt.Errorf("Can't find default remote branch: %s", branchName)
|
||||
}
|
||||
|
||||
if stat.Size() > 10000 || stat.IsDir() {
|
||||
return nil, fmt.Errorf("Funny business with 'packed-refs' in '%s'", gitDir)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(packedRefsPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var references GitReferences
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
if len(line) < 1 || line[0] == '#' {
|
||||
continue
|
||||
}
|
||||
|
||||
splitLine := strings.Split(line, " ")
|
||||
if len(splitLine) != 2 {
|
||||
return nil, fmt.Errorf("Unexpected packaged-refs entry '%#v' in '%s'", splitLine, packedRefsPath)
|
||||
}
|
||||
id, ref := splitLine[0], splitLine[1]
|
||||
const remoteRefPrefix = "refs/remotes/origin/"
|
||||
if ref[0:len(remoteRefPrefix)] != remoteRefPrefix {
|
||||
continue
|
||||
}
|
||||
|
||||
references.addReference(id, ref[len(remoteRefPrefix):])
|
||||
}
|
||||
|
||||
return references.refs, nil
|
||||
return strings.TrimSpace(id), nil
|
||||
}
|
||||
|
||||
func findGitDir(p string) (string, error) {
|
||||
gitFile := path.Join(p, ".git")
|
||||
stat, err := os.Stat(gitFile)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if stat.IsDir() {
|
||||
return path.Join(p, ".git"), nil
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(gitFile)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
refs := strings.Split(line, ":")
|
||||
if len(refs) != 2 {
|
||||
return "", fmt.Errorf("Unknown format of .git file: '%s'\n", line)
|
||||
}
|
||||
|
||||
if refs[0] != "gitdir" {
|
||||
return "", fmt.Errorf("Unknown header of .git file: '%s'\n", refs[0])
|
||||
}
|
||||
|
||||
return path.Join(p, strings.TrimSpace(refs[1])), nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("Can't find git subdirectory in '%s'", p)
|
||||
}
|
||||
|
||||
func (e *GitHandler) GitBranchHead(gitDir, branchName string) (string, error) {
|
||||
path, err := findGitDir(path.Join(e.GitPath, gitDir))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error identifying gitdir in `%s`: %w", gitDir, err)
|
||||
}
|
||||
|
||||
refs, err := processRefs(path)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Error finding branches (%s): %w\n", branchName, err)
|
||||
}
|
||||
|
||||
for _, ref := range refs {
|
||||
if ref.Branch == branchName {
|
||||
return ref.Id, nil
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("Can't find default remote branch: %s", branchName)
|
||||
}
|
||||
|
||||
func (e *GitHandler) Close() error {
|
||||
func (e *GitHandlerImpl) Close() error {
|
||||
if err := os.RemoveAll(e.GitPath); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -207,7 +172,28 @@ func (h writeFunc) Close() error {
|
||||
return err
|
||||
}
|
||||
|
||||
func (e *GitHandler) GitExec(cwd string, params ...string) error {
|
||||
func (e *GitHandlerImpl) GitExecWithOutputOrPanic(cwd string, params ...string) string {
|
||||
out, err := e.GitExecWithOutput(cwd, params...)
|
||||
if err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExecOrPanic(cwd string, params ...string) {
|
||||
if err := e.GitExec(cwd, params...); err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExec(cwd string, params ...string) error {
|
||||
_, err := e.GitExecWithOutput(cwd, params...)
|
||||
return err
|
||||
}
|
||||
|
||||
var ExtraGitParams []string
|
||||
|
||||
func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string, error) {
|
||||
cmd := exec.Command("/usr/bin/git", params...)
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
@@ -218,6 +204,9 @@ func (e *GitHandler) GitExec(cwd string, params ...string) error {
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
|
||||
}
|
||||
if len(ExtraGitParams) > 0 {
|
||||
cmd.Env = append(cmd.Env, ExtraGitParams...)
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stdin = nil
|
||||
|
||||
@@ -232,10 +221,10 @@ func (e *GitHandler) GitExec(cwd string, params ...string) error {
|
||||
if e.DebugLogger {
|
||||
log.Printf(" *** error: %v\n", err)
|
||||
}
|
||||
return fmt.Errorf("error executing: git %#v \n%s\n err: %w", cmd.Args, out, err)
|
||||
return "", fmt.Errorf("error executing: git %#v \n%s\n err: %w", cmd.Args, out, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
return string(out), nil
|
||||
}
|
||||
|
||||
type ChanIO struct {
|
||||
@@ -273,18 +262,18 @@ func (c *ChanIO) Read(data []byte) (idx int, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
type gitMsg struct {
|
||||
type GitMsg struct {
|
||||
hash string
|
||||
itemType string
|
||||
size int
|
||||
}
|
||||
|
||||
type commit struct {
|
||||
type GitCommit struct {
|
||||
Tree string
|
||||
Msg string
|
||||
}
|
||||
|
||||
type tree_entry struct {
|
||||
type GitTreeEntry struct {
|
||||
name string
|
||||
mode int
|
||||
hash string
|
||||
@@ -292,23 +281,23 @@ type tree_entry struct {
|
||||
size int
|
||||
}
|
||||
|
||||
type tree struct {
|
||||
items []tree_entry
|
||||
type GitTree struct {
|
||||
items []GitTreeEntry
|
||||
}
|
||||
|
||||
func (t *tree_entry) isSubmodule() bool {
|
||||
func (t *GitTreeEntry) isSubmodule() bool {
|
||||
return (t.mode & 0170000) == 0160000
|
||||
}
|
||||
|
||||
func (t *tree_entry) isTree() bool {
|
||||
func (t *GitTreeEntry) isTree() bool {
|
||||
return (t.mode & 0170000) == 0040000
|
||||
}
|
||||
|
||||
func (t *tree_entry) isBlob() bool {
|
||||
func (t *GitTreeEntry) isBlob() bool {
|
||||
return !t.isTree() && !t.isSubmodule()
|
||||
}
|
||||
|
||||
func parseGitMsg(data <-chan byte) (gitMsg, error) {
|
||||
func parseGitMsg(data <-chan byte) (GitMsg, error) {
|
||||
var id []byte = make([]byte, 64)
|
||||
var msgType []byte = make([]byte, 16)
|
||||
var size int
|
||||
@@ -319,7 +308,7 @@ func parseGitMsg(data <-chan byte) (gitMsg, error) {
|
||||
id[pos] = c
|
||||
pos++
|
||||
} else {
|
||||
return gitMsg{}, fmt.Errorf("Invalid character during object hash parse '%c' at %d", c, pos)
|
||||
return GitMsg{}, fmt.Errorf("Invalid character during object hash parse '%c' at %d", c, pos)
|
||||
}
|
||||
}
|
||||
id = id[:pos]
|
||||
@@ -331,7 +320,7 @@ func parseGitMsg(data <-chan byte) (gitMsg, error) {
|
||||
msgType[pos] = c
|
||||
pos++
|
||||
} else {
|
||||
return gitMsg{}, fmt.Errorf("Invalid character during object type parse '%c' at %d", c, pos)
|
||||
return GitMsg{}, fmt.Errorf("Invalid character during object type parse '%c' at %d", c, pos)
|
||||
}
|
||||
}
|
||||
msgType = msgType[:pos]
|
||||
@@ -341,26 +330,26 @@ func parseGitMsg(data <-chan byte) (gitMsg, error) {
|
||||
break
|
||||
case "missing":
|
||||
if c != '\x00' {
|
||||
return gitMsg{}, fmt.Errorf("Missing format weird")
|
||||
return GitMsg{}, fmt.Errorf("Missing format weird")
|
||||
}
|
||||
return gitMsg{
|
||||
return GitMsg{
|
||||
hash: string(id[:]),
|
||||
itemType: "missing",
|
||||
size: 0,
|
||||
}, fmt.Errorf("Object not found: '%s'", string(id))
|
||||
default:
|
||||
return gitMsg{}, fmt.Errorf("Invalid object type: '%s'", string(msgType))
|
||||
return GitMsg{}, fmt.Errorf("Invalid object type: '%s'", string(msgType))
|
||||
}
|
||||
|
||||
for c = <-data; c != '\000'; c = <-data {
|
||||
if c >= '0' && c <= '9' {
|
||||
size = size*10 + (int(c) - '0')
|
||||
} else {
|
||||
return gitMsg{}, fmt.Errorf("Invalid character during object size parse: '%c'", c)
|
||||
return GitMsg{}, fmt.Errorf("Invalid character during object size parse: '%c'", c)
|
||||
}
|
||||
}
|
||||
|
||||
return gitMsg{
|
||||
return GitMsg{
|
||||
hash: string(id[:]),
|
||||
itemType: string(msgType),
|
||||
size: size,
|
||||
@@ -400,20 +389,20 @@ func parseGitCommitMsg(data <-chan byte, l int) (string, error) {
|
||||
return string(msg), nil
|
||||
}
|
||||
|
||||
func parseGitCommit(data <-chan byte) (commit, error) {
|
||||
func parseGitCommit(data <-chan byte) (GitCommit, error) {
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return commit{}, err
|
||||
return GitCommit{}, err
|
||||
} else if hdr.itemType != "commit" {
|
||||
return commit{}, fmt.Errorf("expected commit but parsed %s", hdr.itemType)
|
||||
return GitCommit{}, fmt.Errorf("expected commit but parsed %s", hdr.itemType)
|
||||
}
|
||||
|
||||
var c commit
|
||||
var c GitCommit
|
||||
l := hdr.size
|
||||
for {
|
||||
hdr, err := parseGitCommitHdr(data)
|
||||
if err != nil {
|
||||
return commit{}, nil
|
||||
return GitCommit{}, nil
|
||||
}
|
||||
|
||||
if len(hdr[0])+len(hdr[1]) == 0 { // hdr end marker
|
||||
@@ -433,8 +422,8 @@ func parseGitCommit(data <-chan byte) (commit, error) {
|
||||
return c, err
|
||||
}
|
||||
|
||||
func parseTreeEntry(data <-chan byte, hashLen int) (tree_entry, error) {
|
||||
var e tree_entry
|
||||
func parseTreeEntry(data <-chan byte, hashLen int) (GitTreeEntry, error) {
|
||||
var e GitTreeEntry
|
||||
|
||||
for c := <-data; c != ' '; c = <-data {
|
||||
e.mode = e.mode*8 + int(c-'0')
|
||||
@@ -463,20 +452,20 @@ func parseTreeEntry(data <-chan byte, hashLen int) (tree_entry, error) {
|
||||
return e, nil
|
||||
}
|
||||
|
||||
func parseGitTree(data <-chan byte) (tree, error) {
|
||||
func parseGitTree(data <-chan byte) (GitTree, error) {
|
||||
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return tree{}, err
|
||||
return GitTree{}, err
|
||||
}
|
||||
|
||||
// max capacity to length of hash
|
||||
t := tree{items: make([]tree_entry, 0, hdr.size/len(hdr.hash))}
|
||||
t := GitTree{items: make([]GitTreeEntry, 0, hdr.size/len(hdr.hash))}
|
||||
parsedLen := 0
|
||||
for parsedLen < hdr.size {
|
||||
entry, err := parseTreeEntry(data, len(hdr.hash)/2)
|
||||
if err != nil {
|
||||
return tree{}, nil
|
||||
return GitTree{}, nil
|
||||
}
|
||||
|
||||
t.items = append(t.items, entry)
|
||||
@@ -513,8 +502,56 @@ func parseGitBlob(data <-chan byte) ([]byte, error) {
|
||||
return d, nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitParseCommits(cwd string, commitIDs []string) (parsedCommits []GitCommit, err error) {
|
||||
var done sync.Mutex
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
parsedCommits = make([]GitCommit, 0, len(commitIDs))
|
||||
|
||||
go func() {
|
||||
defer done.Unlock()
|
||||
defer close(data_out.ch)
|
||||
|
||||
for _, id := range commitIDs {
|
||||
data_out.Write([]byte(id))
|
||||
data_out.ch <- '\x00'
|
||||
c, e := parseGitCommit(data_in.ch)
|
||||
if e != nil {
|
||||
err = fmt.Errorf("Error parsing git commit: %w", e)
|
||||
return
|
||||
}
|
||||
|
||||
parsedCommits = append(parsedCommits, c)
|
||||
}
|
||||
}()
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Println(string(data))
|
||||
}
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return
|
||||
}
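GitParseCommits pushes the requested ids through `git cat-file --batch -Z` via the ChanIO pipe and decodes the replies into GitCommit values (tree hash plus message). Below is a sketch of calling it against a fresh clone; it mirrors the earlier handler example and the repository URL is illustrative.

```go
package main

import (
	"log"
	"strings"

	"src.opensuse.org/autogits/common"
)

func main() {
	gen := common.GitHandlerGeneratorImpl{}
	git, err := gen.CreateGitHandler("Autogits Bot", "bot@example.org", "parse-demo")
	if err != nil {
		log.Fatal(err)
	}
	defer git.Close()

	// Illustrative repository; any clone will do.
	git.GitExecOrPanic("", "clone", "--depth", "2", "https://src.opensuse.org/adamm/autogits.git", "repo")
	head := strings.TrimSpace(git.GitExecWithOutputOrPanic("repo", "rev-parse", "HEAD"))

	// Resolve HEAD through `git cat-file --batch -Z` into tree hash + message.
	commits, err := git.GitParseCommits("repo", []string{head})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range commits {
		log.Printf("HEAD %s: tree=%s msg=%q", head, c.Tree, c.Msg)
	}
}
```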
|
||||
|
||||
// TODO: support sub-trees
|
||||
func (e *GitHandler) GitCatFile(cwd, commitId, filename string) (data []byte, err error) {
|
||||
func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte, err error) {
|
||||
var done sync.Mutex
|
||||
|
||||
done.Lock()
|
||||
@@ -557,67 +594,7 @@ func (e *GitHandler) GitCatFile(cwd, commitId, filename string) (data []byte, er
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Printf(string(data))
|
||||
}
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return
|
||||
}
|
||||
|
||||
// return (filename) -> (hash) map for all submodules
|
||||
// TODO: recursive? map different orgs, not just assume '.' for path
|
||||
func (e *GitHandler) GitSubmoduleList(cwd, commitId string) (submoduleList map[string]string, err error) {
|
||||
var done sync.Mutex
|
||||
submoduleList = make(map[string]string)
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
|
||||
go func() {
|
||||
defer done.Unlock()
|
||||
defer close(data_out.ch)
|
||||
|
||||
data_out.Write([]byte(commitId))
|
||||
data_out.ch <- '\x00'
|
||||
var c commit
|
||||
c, err = parseGitCommit(data_in.ch)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git commit. Err: %w", err)
|
||||
return
|
||||
}
|
||||
data_out.Write([]byte(c.Tree))
|
||||
data_out.ch <- '\x00'
|
||||
var tree tree
|
||||
tree, err = parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git tree: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, te := range tree.items {
|
||||
if te.isSubmodule() {
|
||||
submoduleList[te.name] = te.hash
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
@@ -635,17 +612,72 @@ func (e *GitHandler) GitSubmoduleList(cwd, commitId string) (submoduleList map[s
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return submoduleList, err
|
||||
return
|
||||
}
|
||||
|
||||
func (e *GitHandler) GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
commitId = ""
|
||||
valid = false
|
||||
// return (filename) -> (hash) map for all submodules
|
||||
// TODO: recursive? map different orgs, not just assume '.' for path
|
||||
func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error) {
|
||||
var done sync.Mutex
|
||||
submoduleList = make(map[string]string)
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
|
||||
go func() {
|
||||
defer done.Unlock()
|
||||
defer close(data_out.ch)
|
||||
|
||||
data_out.Write([]byte(commitId))
|
||||
data_out.ch <- '\x00'
|
||||
var c GitCommit
|
||||
c, err = parseGitCommit(data_in.ch)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git commit. Err: %w", err)
|
||||
return
|
||||
}
|
||||
data_out.Write([]byte(c.Tree))
|
||||
data_out.ch <- '\x00'
|
||||
var tree GitTree
|
||||
tree, err = parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git tree: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, te := range tree.items {
|
||||
if te.isSubmodule() {
|
||||
submoduleList[te.name] = te.hash
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, gitPath)
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Println(string(data))
|
||||
}
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return submoduleList, err
|
||||
}
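GitSubmoduleList reads the commit's top-level tree and returns a path-to-hash map for every gitlink entry, i.e. the package revisions pinned by a ProjectGit commit. The helper below is a hypothetical sketch: it assumes an existing handler with a clone in a "prjgit" subdirectory (that name is also made up).

```go
package main

import (
	"log"

	"src.opensuse.org/autogits/common"
)

// listPackages logs every submodule pinned by the HEAD commit of the
// "prjgit" checkout inside the handler's GitPath.
func listPackages(git common.Git) error {
	head, err := git.GitBranchHead("prjgit", "HEAD")
	if err != nil {
		return err
	}

	subs, err := git.GitSubmoduleList("prjgit", head)
	if err != nil {
		return err
	}
	for pkg, hash := range subs {
		log.Printf("%s -> %s", pkg, hash)

		// GitSubmoduleCommitId re-reads a single entry straight from the tree.
		if id, ok := git.GitSubmoduleCommitId("prjgit", pkg, head); ok && id != hash {
			log.Printf("  unexpected mismatch for %s: %s", pkg, id)
		}
	}
	return nil
}
```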
|
||||
|
||||
func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool) {
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
var wg sync.WaitGroup
|
||||
|
||||
@@ -656,6 +688,14 @@ func (e *GitHandler) GitSubmoduleCommitId(cwd, packageName, commitId string) (su
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
subCommitId = "wrong"
|
||||
commitId = "ok"
|
||||
valid = false
|
||||
}
|
||||
}()
|
||||
|
||||
defer wg.Done()
|
||||
defer close(data_out.ch)
|
||||
|
||||
@@ -684,6 +724,7 @@ func (e *GitHandler) GitSubmoduleCommitId(cwd, packageName, commitId string) (su
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
@@ -703,3 +744,182 @@ func (e *GitHandler) GitSubmoduleCommitId(cwd, packageName, commitId string) (su
|
||||
wg.Wait()
|
||||
return subCommitId, len(subCommitId) == len(commitId)
|
||||
}
|
||||
|
||||
const (
|
||||
GitStatus_Untracked = 0
|
||||
GitStatus_Modified = 1
|
||||
GitStatus_Ignored = 2
|
||||
GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
|
||||
GitStatus_Renamed = 4 // orig name in States[0]
|
||||
)
|
||||
|
||||
type GitStatusData struct {
|
||||
Path string
|
||||
Status int
|
||||
States [3]string
|
||||
}
|
||||
|
||||
func parseGitStatusHexString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 32)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
switch {
|
||||
case c == 0 || c == ' ':
|
||||
return string(str), nil
|
||||
case c >= 'a' && c <= 'f':
|
||||
case c >= 'A' && c <= 'F':
|
||||
case c >= '0' && c <= '9':
|
||||
default:
|
||||
return "", errors.New("Invalid character in hex string:" + string(c))
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
func parseGitStatusString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
|
||||
for skipSpaceLen > 0 {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c == ' ' {
|
||||
skipSpaceLen--
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
|
||||
ret := GitStatusData{}
|
||||
statusType, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
switch statusType {
|
||||
case '1':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 8); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Modified
|
||||
ret.Path, err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '2':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 9); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Renamed
|
||||
ret.Path, err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.States[0], err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '?':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Untracked
|
||||
ret.Path, err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '!':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Ignored
|
||||
ret.Path, err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case 'u':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 7); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[1], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[2], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Unmerged
|
||||
ret.Path, err = parseGitStatusString(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
default:
|
||||
return nil, errors.New("Invalid status type" + string(statusType))
|
||||
}
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func parseGitStatusData(data io.ByteReader) ([]GitStatusData, error) {
|
||||
ret := make([]GitStatusData, 0, 10)
|
||||
for {
|
||||
data, err := parseSingleStatusEntry(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if data == nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = append(ret, *data)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
|
||||
if e.DebugLogger {
|
||||
log.Println("getting git-status()")
|
||||
}
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "status", "--porcelain=2", "-z")
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
log.Println(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
out, err := cmd.Output()
|
||||
if err != nil {
|
||||
log.Printf("Error running command %v, err: %v", cmd.Args, err)
|
||||
}
|
||||
|
||||
return parseGitStatusData(bufio.NewReader(bytes.NewReader(out)))
|
||||
}
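GitStatus wraps `git status --porcelain=2 -z` and returns one GitStatusData per entry, with the Status constants above classifying each path and States carrying the extra detail for renames and unmerged entries. A hypothetical helper sketch built only on the GitStatusLister interface:

```go
package main

import (
	"log"

	"src.opensuse.org/autogits/common"
)

// reportDirtyFiles logs every non-clean path in the checkout under `dir`,
// relative to the handler's GitPath.
func reportDirtyFiles(git common.GitStatusLister, dir string) {
	entries, err := git.GitStatus(dir)
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		switch e.Status {
		case common.GitStatus_Modified:
			log.Println("modified:", e.Path)
		case common.GitStatus_Untracked:
			log.Println("untracked:", e.Path)
		case common.GitStatus_Renamed:
			log.Println("renamed:", e.States[0], "->", e.Path)
		case common.GitStatus_Unmerged:
			log.Println("unmerged:", e.Path)
		}
	}
}
```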
|
||||
|
@@ -19,9 +19,12 @@ package common
|
||||
*/
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
@@ -259,7 +262,7 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
|
||||
t.Run("reads HEAD and parses the tree", func(t *testing.T) {
|
||||
const nodejs21 = "c678c57007d496a98bec668ae38f2c26a695f94af78012f15d044ccf066ccb41"
|
||||
h := GitHandler{
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
id, ok := h.GitSubmoduleCommitId("", "nodejs21", commitId)
|
||||
@@ -272,7 +275,7 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("reads README.md", func(t *testing.T) {
|
||||
h := GitHandler{
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
data, err := h.GitCatFile("", commitId, "README.md")
|
||||
@@ -285,7 +288,7 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("read HEAD", func(t *testing.T) {
|
||||
h := GitHandler{
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
|
||||
@@ -302,3 +305,110 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
t.Run("try to parse unknown item", func(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestGitStatusParse(t *testing.T) {
|
||||
testData := []struct {
|
||||
name string
|
||||
data []byte
|
||||
res []GitStatusData
|
||||
}{
|
||||
{
|
||||
name: "Single modified line",
|
||||
data: []byte("1 .M N... 100644 100644 100644 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 bots-common/git_utils.go\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "bots-common/git_utils.go",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Untracked entries",
|
||||
data: []byte("1 .M N... 100644 100644 100644 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 bots-common/git_utils.go\x00? bots-common/c.out\x00? doc/Makefile\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "bots-common/git_utils.go",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
{
|
||||
Path: "bots-common/c.out",
|
||||
Status: GitStatus_Untracked,
|
||||
},
|
||||
{
|
||||
Path: "doc/Makefile",
|
||||
Status: GitStatus_Untracked,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Untracked entries",
|
||||
data: []byte("1 .M N... 100644 100644 100644 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 dbe4b3d5a0a2e385f78fd41d726baa20e9190f7b5a2e78cbd4885586832f39e7 bots-common/git_utils.go\x00? bots-common/c.out\x00! doc/Makefile\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "bots-common/git_utils.go",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
{
|
||||
Path: "bots-common/c.out",
|
||||
Status: GitStatus_Untracked,
|
||||
},
|
||||
{
|
||||
Path: "doc/Makefile",
|
||||
Status: GitStatus_Ignored,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Nothing",
|
||||
},
|
||||
{
|
||||
name: "Unmerged .gitmodules during a merge",
|
||||
data: []byte("1 A. S... 000000 160000 160000 0000000000000000000000000000000000000000000000000000000000000000 ed07665aea0522096c88a7555f1fa9009ed0e0bac92de4613c3479516dd3d147 pkgB2\x00u UU N... 100644 100644 100644 100644 587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76 d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c 087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33 .gitmodules\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "pkgB2",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Unmerged,
|
||||
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Renamed file",
|
||||
data: []byte("1 M. N... 100644 100644 100644 d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c 896cd09f36d39e782d66ae32dd5614d4f4d83fc689f132aab2dfc019a9f5b6f3 .gitmodules\x002 R. S... 160000 160000 160000 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 R100 pkgQ\x00pkgC\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "pkgQ",
|
||||
Status: GitStatus_Renamed,
|
||||
States: [3]string{"pkgC"},
|
||||
|
||||
},
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testData {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
r, err := parseGitStatusData(bufio.NewReader(bytes.NewReader(test.data)))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(r) != len(test.res) {
|
||||
t.Fatal("len(r):", len(r), "is not expected", len(test.res))
|
||||
}
|
||||
|
||||
for _, expected := range test.res {
|
||||
if !slices.Contains(r, expected) {
|
||||
t.Fatal("result", r, "doesn't contains expected", expected)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@@ -19,9 +19,11 @@ package common
|
||||
*/
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
@@ -33,10 +35,17 @@ import (
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/notification"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/organization"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/user"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
const PrPattern = "PR: %s/%s#%d"
|
||||
//go:generate mockgen -source=gitea_utils.go -destination=mock/gitea_utils.go -typed
|
||||
|
||||
// maintainer list file in ProjectGit
|
||||
const (
|
||||
MaintainershipFile = "_maintainership.json"
|
||||
MaintainershipDir = "maintainership"
|
||||
)
|
||||
|
||||
const (
|
||||
// from Gitea
|
||||
@@ -54,12 +63,71 @@ const (
|
||||
ReviewStateUnknown models.ReviewStateType = ""
|
||||
)
|
||||
|
||||
type GiteaMaintainershipReader interface {
|
||||
FetchMaintainershipFile(org, prjGit, branch string) ([]byte, string, error)
|
||||
FetchMaintainershipDirFile(org, prjGit, branch, pkg string) ([]byte, string, error)
|
||||
}
|
||||
|
||||
type GiteaPRFetcher interface {
|
||||
GetPullRequest(org, project string, num int64) (*models.PullRequest, error)
|
||||
GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error)
|
||||
}
|
||||
|
||||
type GiteaReviewFetcher interface {
|
||||
GetPullRequestReviews(org, project string, PRnum int64) ([]*models.PullReview, error)
|
||||
}
|
||||
|
||||
type GiteaPRChecker interface {
|
||||
GiteaReviewFetcher
|
||||
GiteaMaintainershipReader
|
||||
}
|
||||
|
||||
type GiteaReviewFetcherAndRequester interface {
|
||||
GiteaReviewFetcher
|
||||
GiteaReviewRequester
|
||||
}
|
||||
|
||||
type GiteaReviewRequester interface {
|
||||
RequestReviews(pr *models.PullRequest, reviewer ...string) ([]*models.PullReview, error)
|
||||
}
|
||||
|
||||
type GiteaReviewer interface {
|
||||
AddReviewComment(pr *models.PullRequest, state models.ReviewStateType, comment string) (*models.PullReview, error)
|
||||
}
|
||||
|
||||
type GiteaRepoFetcher interface {
|
||||
GetRepository(org, repo string) (*models.Repository, error)
|
||||
}
|
||||
|
||||
type Gitea interface {
|
||||
GiteaRepoFetcher
|
||||
GiteaReviewRequester
|
||||
GiteaReviewer
|
||||
GiteaPRFetcher
|
||||
GiteaReviewFetcher
|
||||
GiteaMaintainershipReader
|
||||
|
||||
GetPullNotifications(since *time.Time) ([]*models.NotificationThread, error)
|
||||
SetNotificationRead(notificationId int64) error
|
||||
GetOrganization(orgName string) (*models.Organization, error)
|
||||
GetOrganizationRepositories(orgName string) ([]*models.Repository, error)
|
||||
CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error)
|
||||
CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error)
|
||||
GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error)
|
||||
GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error)
|
||||
GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error)
|
||||
GetRecentPullRequests(org, repo string) ([]*models.PullRequest, error)
|
||||
GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error)
|
||||
|
||||
GetCurrentUser() (*models.User, error)
|
||||
}
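The narrow single-purpose interfaces above (GiteaPRFetcher, GiteaReviewFetcher, and so on) let callers and the gomock-generated mocks depend only on the calls they actually use. A hypothetical consumer, for illustration only; it is not part of this changeset, would live in the same common package, and assumes the generated models.PullRequest exposes a Title field:

// Hypothetical sketch, not part of the diff: a helper that only needs
// GiteaPRFetcher, so tests can pass in a mock instead of a live GiteaTransport.
func pullRequestTitle(f GiteaPRFetcher, org, repo string, num int64) (string, error) {
	pr, err := f.GetPullRequest(org, repo, num)
	if err != nil {
		return "", err
	}
	return pr.Title, nil
}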
|
||||
|
||||
type GiteaTransport struct {
|
||||
transport *transport.Runtime
|
||||
client *apiclient.GiteaAPI
|
||||
}
|
||||
|
||||
func AllocateGiteaTransport(host string) *GiteaTransport {
|
||||
func AllocateGiteaTransport(host string) Gitea {
|
||||
var r GiteaTransport
|
||||
|
||||
r.transport = transport.New(host, apiclient.DefaultBasePath, [](string){"https"})
|
||||
@@ -70,7 +138,15 @@ func AllocateGiteaTransport(host string) *GiteaTransport {
|
||||
return &r
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullRequestAndReviews(org, project string, num int64) (*models.PullRequest, []*models.PullReview, error) {
|
||||
func (gitea *GiteaTransport) FetchMaintainershipFile(org, repo, branch string) ([]byte, string, error) {
|
||||
return gitea.GetRepositoryFileContent(org, repo, branch, MaintainershipFile)
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) FetchMaintainershipDirFile(org, repo, branch, pkg string) ([]byte, string, error) {
|
||||
return gitea.GetRepositoryFileContent(org, repo, branch, path.Join(MaintainershipDir, pkg))
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullRequest(org, project string, num int64) (*models.PullRequest, error) {
|
||||
pr, err := gitea.client.Repository.RepoGetPullRequest(
|
||||
repository.NewRepoGetPullRequestParams().
|
||||
WithDefaults().
|
||||
@@ -80,26 +156,47 @@ func (gitea *GiteaTransport) GetPullRequestAndReviews(org, project string, num i
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
return pr.Payload, err
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRepository(org, pkg string) (*models.Repository, error) {
|
||||
repo, err := gitea.client.Repository.RepoGet(repository.NewRepoGetParams().WithDefaults().WithOwner(org).WithRepo(pkg), gitea.transport.DefaultAuthentication)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
limit := int64(1000)
|
||||
reviews, err := gitea.client.Repository.RepoListPullReviews(
|
||||
repository.NewRepoListPullReviewsParams().
|
||||
WithDefaults().
|
||||
WithOwner(org).
|
||||
WithRepo(project).
|
||||
WithIndex(num).
|
||||
WithLimit(&limit),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
return repo.Payload, nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum int64) ([]*models.PullReview, error) {
|
||||
limit := int64(20)
|
||||
var page int64
|
||||
var allReviews []*models.PullReview
|
||||
|
||||
for {
|
||||
reviews, err := gitea.client.Repository.RepoListPullReviews(
|
||||
repository.NewRepoListPullReviewsParams().
|
||||
WithDefaults().
|
||||
WithOwner(org).
|
||||
WithRepo(project).
|
||||
WithIndex(PRnum).
|
||||
WithPage(&page).
|
||||
WithLimit(&limit),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
allReviews = slices.Concat(allReviews, reviews.Payload)
|
||||
if len(reviews.Payload) < int(limit) {
|
||||
break
|
||||
}
|
||||
page++
|
||||
}
|
||||
|
||||
return pr.Payload, reviews.Payload, nil
|
||||
return allReviews, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullNotifications(since *time.Time) ([]*models.NotificationThread, error) {
|
||||
@@ -176,9 +273,9 @@ func (gitea *GiteaTransport) GetOrganizationRepositories(orgName string) ([]*mod
|
||||
return repos, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git *GitHandler, org Organization, repoName string) (*models.Repository, error) {
|
||||
func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error) {
|
||||
repo, err := gitea.client.Repository.RepoGet(
|
||||
repository.NewRepoGetParams().WithDefaults().WithOwner(org.Username).WithRepo(repoName),
|
||||
repository.NewRepoGetParams().WithDefaults().WithOwner(org).WithRepo(repoName),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
@@ -191,7 +288,7 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git *GitHandler, org Org
|
||||
Name: &repoName,
|
||||
ObjectFormatName: models.CreateRepoOptionObjectFormatNameSha256,
|
||||
},
|
||||
).WithOrg(org.Username),
|
||||
).WithOrg(org),
|
||||
nil,
|
||||
)
|
||||
|
||||
@@ -200,12 +297,12 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git *GitHandler, org Org
|
||||
case *organization.CreateOrgRepoCreated:
|
||||
// weird, but ok, repo created
|
||||
default:
|
||||
return nil, fmt.Errorf("error creating repo '%s' under '%s'. Err: %w", repoName, org.Username, err)
|
||||
return nil, fmt.Errorf("error creating repo '%s' under '%s'. Err: %w", repoName, org, err)
|
||||
}
|
||||
}
|
||||
|
||||
// initialize repository
|
||||
if err = os.Mkdir(filepath.Join(git.GitPath, DefaultGitPrj), 0700); err != nil {
|
||||
if err = os.Mkdir(filepath.Join(git.GetPath(), DefaultGitPrj), 0700); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "init", "--object-format="+repo.Payload.ObjectFormatName); err != nil {
|
||||
@@ -214,7 +311,7 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git *GitHandler, org Org
|
||||
if err = git.GitExec(DefaultGitPrj, "checkout", "-b", repo.Payload.DefaultBranch); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
readmeFilename := filepath.Join(git.GitPath, DefaultGitPrj, "README.md")
|
||||
readmeFilename := filepath.Join(git.GetPath(), DefaultGitPrj, "README.md")
|
||||
{
|
||||
file, _ := os.Create(readmeFilename)
|
||||
defer file.Close()
|
||||
@@ -233,14 +330,14 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git *GitHandler, org Org
|
||||
|
||||
return repo.Payload, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("cannot fetch repo data for '%s' / '%s' : %w", org.Username, repoName, err)
|
||||
return nil, fmt.Errorf("cannot fetch repo data for %s/%s: %w", org, repoName, err)
|
||||
}
|
||||
}
|
||||
|
||||
return repo.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) CreatePullRequest(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
|
||||
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
|
||||
prOptions := models.CreatePullRequestOption{
|
||||
Base: repo.DefaultBranch,
|
||||
Head: srcId,
|
||||
@@ -248,6 +345,13 @@ func (gitea *GiteaTransport) CreatePullRequest(repo *models.Repository, srcId, t
|
||||
Body: body,
|
||||
}
|
||||
|
||||
if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
|
||||
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(repo.DefaultBranch).WithHead(srcId),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
); err == nil {
|
||||
return pr.Payload, nil
|
||||
}
|
||||
|
||||
pr, err := gitea.client.Repository.RepoCreatePullRequest(
|
||||
repository.
|
||||
NewRepoCreatePullRequestParams().
|
||||
@@ -265,9 +369,49 @@ func (gitea *GiteaTransport) CreatePullRequest(repo *models.Repository, srcId, t
|
||||
return pr.GetPayload(), nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewer string) ([]*models.PullReview, error) {
|
||||
func (gitea *GiteaTransport) GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error) {
|
||||
var page int64
|
||||
state := "open"
|
||||
for {
|
||||
page++
|
||||
prs, err := gitea.client.Repository.RepoListPullRequests(
|
||||
repository.
|
||||
NewRepoListPullRequestsParams().
|
||||
WithDefaults().
|
||||
WithOwner(prjGitOrg).
|
||||
WithRepo(prjGitRepo).
|
||||
WithState(&state).
|
||||
WithPage(&page),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", prjGitOrg, prjGitRepo, err)
|
||||
}
|
||||
|
||||
prLine := fmt.Sprintf(PrPattern, refOrg, refRepo, Index)
|
||||
|
||||
// payload_processing:
|
||||
for _, pr := range prs.Payload {
|
||||
lines := strings.Split(pr.Body, "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
if strings.TrimSpace(line) == prLine {
|
||||
return pr, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(prs.Payload) < 10 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
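For illustration only (the org, repo, and index below are made up): GetAssociatedPrjGitPR looks for a PR body line that matches PrPattern exactly after trimming whitespace. A standalone sketch of that match:

// Standalone sketch of the PrPattern match; values are hypothetical.
package main

import (
	"fmt"
	"strings"
)

const PrPattern = "PR: %s/%s#%d"

func main() {
	prLine := fmt.Sprintf(PrPattern, "someOrg", "somePackage", 42)
	body := "Updated package\n\nPR: someOrg/somePackage#42\n"
	for _, line := range strings.Split(body, "\n") {
		if strings.TrimSpace(line) == prLine {
			fmt.Println("matched:", line)
		}
	}
	// Output: matched: PR: someOrg/somePackage#42
}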
|
||||
|
||||
func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewers ...string) ([]*models.PullReview, error) {
|
||||
reviewOptions := models.PullReviewRequestOptions{
|
||||
Reviewers: []string{reviewer},
|
||||
Reviewers: reviewers,
|
||||
}
|
||||
|
||||
review, err := gitea.client.Repository.RepoCreatePullReviewRequests(
|
||||
@@ -281,71 +425,12 @@ func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewer str
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create pull request: %w", err)
|
||||
return nil, fmt.Errorf("Cannot create pull request reviews: %w", err)
|
||||
}
|
||||
|
||||
return review.GetPayload(), nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) IsReviewed(pr *models.PullRequest) (bool, error) {
|
||||
// TODO: get review from project git
|
||||
reviewers := pr.RequestedReviewers
|
||||
var page, limit int64
|
||||
var reviews []*models.PullReview
|
||||
page = 0
|
||||
limit = 20
|
||||
for {
|
||||
res, err := gitea.client.Repository.RepoListPullReviews(
|
||||
repository.NewRepoListPullReviewsParams().
|
||||
WithOwner(pr.Base.Repo.Owner.UserName).
|
||||
WithRepo(pr.Base.Repo.Name).
|
||||
WithPage(&page).
|
||||
WithLimit(&limit),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if res.IsSuccess() {
|
||||
r := res.Payload
|
||||
|
||||
if reviews == nil {
|
||||
reviews = r
|
||||
} else {
|
||||
reviews = append(reviews, r...)
|
||||
}
|
||||
|
||||
if len(r) < int(limit) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
slices.Reverse(reviews)
|
||||
|
||||
for _, review := range reviews {
|
||||
if review.Stale || review.Dismissed {
|
||||
continue
|
||||
}
|
||||
|
||||
next_review:
|
||||
for i, reviewer := range reviewers {
|
||||
if review.User.UserName == reviewer.UserName {
|
||||
switch review.State {
|
||||
case ReviewStateApproved:
|
||||
reviewers = slices.Delete(reviewers, i, i)
|
||||
break next_review
|
||||
case ReviewStateRequestChanges:
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return len(reviewers) == 0, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) AddReviewComment(pr *models.PullRequest, state models.ReviewStateType, comment string) (*models.PullReview, error) {
|
||||
c, err := gitea.client.Repository.RepoCreatePullReview(
|
||||
repository.NewRepoCreatePullReviewParams().
|
||||
@@ -393,79 +478,80 @@ func (gitea *GiteaTransport) AddReviewComment(pr *models.PullRequest, state mode
|
||||
return c.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetAssociatedPrjGitPR(pr *PullRequestWebhookEvent) (*models.PullRequest, error) {
|
||||
var page, maxSize int64
|
||||
page = 1
|
||||
maxSize = 10000
|
||||
state := "open"
|
||||
prs, err := gitea.client.Repository.RepoListPullRequests(
|
||||
repository.
|
||||
NewRepoListPullRequestsParams().
|
||||
WithDefaults().
|
||||
WithOwner(pr.Repository.Owner.Username).
|
||||
WithRepo(DefaultGitPrj).
|
||||
WithState(&state).
|
||||
WithLimit(&maxSize).
|
||||
WithPage(&page),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", pr.Repository.Owner.Username, pr.Repository.Name, err)
|
||||
func (gitea *GiteaTransport) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
|
||||
params := repository.NewRepoGetContentsParams().WithOwner(org).WithRepo(repo).WithFilepath(path)
|
||||
if len(hash) > 0 {
|
||||
params = params.WithRef(&hash)
|
||||
}
|
||||
|
||||
prLine := fmt.Sprintf(PrPattern, pr.Repository.Owner.Username, pr.Repository.Name, pr.Number)
|
||||
// h.StdLogger.Printf("attemping to match line: '%s'\n", prLine)
|
||||
|
||||
// payload_processing:
|
||||
for _, pr := range prs.Payload {
|
||||
lines := strings.Split(pr.Body, "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
if strings.TrimSpace(line) == prLine {
|
||||
return pr, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRepositoryFileContent(repo *models.Repository, hash, path string) ([]byte, error) {
|
||||
var retData []byte
|
||||
|
||||
dataOut := writeFunc(func(data []byte) (int, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
retData = data
|
||||
return len(data), nil
|
||||
})
|
||||
_, err := gitea.client.Repository.RepoGetRawFile(
|
||||
repository.NewRepoGetRawFileParams().
|
||||
WithOwner(repo.Owner.UserName).
|
||||
WithRepo(repo.Name).
|
||||
WithFilepath(path).
|
||||
WithRef(&hash),
|
||||
content, err := gitea.client.Repository.RepoGetContents(params,
|
||||
gitea.transport.DefaultAuthentication,
|
||||
dataOut,
|
||||
repository.WithContentTypeApplicationOctetStream,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, "", err
|
||||
}
|
||||
if content.Payload.Encoding != "base64" {
|
||||
return nil, "", fmt.Errorf("Unhandled content encoding: %s", content.Payload.Encoding)
|
||||
}
|
||||
|
||||
return retData, nil
|
||||
if content.Payload.Size > 10000000 {
|
||||
return nil, "", fmt.Errorf("Content length is too large for %s/%s/%s#%s - %d bytes", org, repo, path, hash, content.Payload.Size)
|
||||
}
|
||||
|
||||
data := make([]byte, content.Payload.Size)
|
||||
n, err := base64.RawStdEncoding.Decode(data, []byte(content.Payload.Content))
|
||||
if err != nil {
|
||||
return nil, "", fmt.Errorf("Error decoding file %s/%s/%s#%s : %w", org, repo, path, hash, err)
|
||||
}
|
||||
if n != int(content.Payload.Size) {
|
||||
return nil, "", fmt.Errorf("Decoded length doesn't match expected for %s/%s/%s#%s - %d vs %d bytes", org, repo, path, hash, content.Payload.Size, n)
|
||||
}
|
||||
|
||||
return data, content.Payload.SHA, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, error) {
|
||||
return gitea.GetRepositoryFileContent(pr.Head.Repo, pr.Head.Sha, path)
|
||||
func (gitea *GiteaTransport) GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error) {
|
||||
return gitea.GetRepositoryFileContent(pr.Head.Repo.Owner.UserName, pr.Head.Repo.Name, pr.Head.Sha, path)
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRecentPullRequests(org, repo string) ([]*models.PullRequest, error) {
|
||||
prs := make([]*models.PullRequest, 0, 10)
|
||||
var page int64
|
||||
page = 1
|
||||
sort := "recentupdate"
|
||||
|
||||
for {
|
||||
res, err := gitea.client.Repository.RepoListPullRequests(
|
||||
repository.NewRepoListPullRequestsParams().
|
||||
WithOwner(org).
|
||||
WithRepo(repo).
|
||||
WithPage(&page).
|
||||
WithSort(&sort),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
prs = append(prs, res.Payload...)
|
||||
n := len(res.Payload)
|
||||
if n < 10 {
|
||||
break
|
||||
}
|
||||
|
||||
// if pr is closed for more than a week, assume that we are done too
|
||||
if time.Since(time.Time(res.Payload[n-1].Updated)) > 7*24*time.Hour {
|
||||
break
|
||||
}
|
||||
|
||||
page++
|
||||
}
|
||||
|
||||
return prs, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error) {
|
||||
not := false
|
||||
var page int64
|
||||
page = 1
|
||||
var page int64 = 1
|
||||
commits, err := gitea.client.Repository.RepoGetAllCommits(
|
||||
repository.NewRepoGetAllCommitsParams().
|
||||
WithOwner(org).
|
||||
@@ -485,3 +571,16 @@ func (gitea *GiteaTransport) GetRecentCommits(org, repo, branch string, commitNo
|
||||
|
||||
return commits.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetCurrentUser() (*models.User, error) {
|
||||
user, err := gitea.client.User.UserGetCurrent(
|
||||
user.NewUserGetCurrentParams(),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return user.GetPayload(), nil
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
|
||||
module src.opensuse.org/autogits/common
|
||||
|
||||
go 1.22.3
|
||||
go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/go-openapi/errors v0.22.0
|
||||
@@ -9,6 +9,7 @@ require (
|
||||
github.com/go-openapi/swag v0.23.0
|
||||
github.com/go-openapi/validate v0.24.0
|
||||
github.com/rabbitmq/amqp091-go v1.10.0
|
||||
go.uber.org/mock v0.5.0
|
||||
)
|
||||
|
||||
require (
|
||||
|
@@ -68,6 +68,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
||||
|
@@ -23,6 +23,7 @@ import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -53,30 +54,59 @@ const RequestType_PRReviewRequest = "pull_request_review_request"
|
||||
const RequestType_PRReviewComment = "pull_request_review_comment"
|
||||
const RequestType_Wiki = "wiki"
|
||||
|
||||
type RequestProcessor func(*RequestHandler) error
|
||||
type RequestProcessor interface {
|
||||
ProcessFunc(*Request) error
|
||||
}
|
||||
|
||||
type ListenDefinitions struct {
|
||||
RabbitURL string // amqps://user:password@host/queue
|
||||
RabbitURL *url.URL // amqps://user:password@host/queue
|
||||
|
||||
GitAuthor string
|
||||
Handlers map[string]RequestProcessor
|
||||
Orgs []string
|
||||
|
||||
topics []string
|
||||
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
|
||||
}
|
||||
|
||||
type RabbitMessage rabbitmq.Delivery
|
||||
|
||||
func processRabbitMQ(msgCh chan<- RabbitMessage, server url.URL, topics []string) error {
|
||||
queueName := server.Path
|
||||
server.Path = ""
|
||||
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
|
||||
for {
|
||||
topic, ok := <-l.topicSubChanges
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
log.Println(" topic change:", topic)
|
||||
switch topic[0] {
|
||||
case '+':
|
||||
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
case '-':
|
||||
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
default:
|
||||
log.Println("Ignoring topic change.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
|
||||
queueName := l.RabbitURL.Path
|
||||
l.RabbitURL.Path = ""
|
||||
|
||||
if len(queueName) > 0 && queueName[0] == '/' {
|
||||
queueName = queueName[1:]
|
||||
}
|
||||
|
||||
connection, err := rabbitmq.DialTLS(server.String(), &tls.Config{
|
||||
ServerName: server.Hostname(),
|
||||
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
|
||||
ServerName: l.RabbitURL.Hostname(),
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot connect to %s . Err: %w", server.Hostname(), err)
|
||||
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
|
||||
}
|
||||
defer connection.Close()
|
||||
|
||||
@@ -121,12 +151,12 @@ func processRabbitMQ(msgCh chan<- RabbitMessage, server url.URL, topics []string
|
||||
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
|
||||
|
||||
log.Println(" -- listening to topics:")
|
||||
for _, topic := range topics {
|
||||
err = ch.QueueBind(q.Name, topic, "pubsub", false, nil)
|
||||
log.Println(" +", topic)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot find queue to exchange with topic %s. Err: %w", topic, err)
|
||||
}
|
||||
l.topicSubChanges = make(chan string)
|
||||
defer close(l.topicSubChanges)
|
||||
go l.processTopicChanges(ch, q.Name)
|
||||
|
||||
for _, topic := range l.topics {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
}
|
||||
|
||||
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
|
||||
@@ -145,18 +175,18 @@ func processRabbitMQ(msgCh chan<- RabbitMessage, server url.URL, topics []string
|
||||
}
|
||||
}
|
||||
|
||||
func connectAndProcessRabbitMQ(log *log.Logger, ch chan<- RabbitMessage, server url.URL, topics []string) {
|
||||
func (l *ListenDefinitions) connectAndProcessRabbitMQ(log *log.Logger, ch chan<- RabbitMessage) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Println(r)
|
||||
log.Println("'crash' RabbitMQ worker. Recovering... reconnecting...")
|
||||
time.Sleep(5 * time.Second)
|
||||
go connectAndProcessRabbitMQ(log, ch, server, topics)
|
||||
go l.connectAndProcessRabbitMQ(log, ch)
|
||||
}
|
||||
}()
|
||||
|
||||
for {
|
||||
err := processRabbitMQ(ch, server, topics)
|
||||
err := l.processRabbitMQ(ch)
|
||||
if err != nil {
|
||||
log.Printf("Error in RabbitMQ connection. %#v", err)
|
||||
log.Println("Reconnecting in 2 seconds...")
|
||||
@@ -165,50 +195,85 @@ func connectAndProcessRabbitMQ(log *log.Logger, ch chan<- RabbitMessage, server
|
||||
}
|
||||
}
|
||||
|
||||
func connectToRabbitMQ(log *log.Logger, server url.URL, topics []string) chan RabbitMessage {
|
||||
func (l *ListenDefinitions) connectToRabbitMQ(log *log.Logger) chan RabbitMessage {
|
||||
ch := make(chan RabbitMessage, 100)
|
||||
go connectAndProcessRabbitMQ(log, ch, server, topics)
|
||||
go l.connectAndProcessRabbitMQ(log, ch)
|
||||
|
||||
return ch
|
||||
}
|
||||
|
||||
func ProcessEvent(f RequestProcessor, h *RequestHandler) {
|
||||
func ProcessEvent(f RequestProcessor, request *Request) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Println(r)
|
||||
log.Println("panic caught")
|
||||
if err, ok := r.(error); ok {
log.Println(err)
|
||||
}
|
||||
log.Println(string(debug.Stack()))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := f(h); err != nil {
|
||||
if err := f.ProcessFunc(request); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func ProcessRabbitMQEvents(listenDefs ListenDefinitions, orgs []string) error {
|
||||
server, err := url.Parse(listenDefs.RabbitURL)
|
||||
if err != nil {
|
||||
log.Panicf("cannot parse server URL. Err: %#v\n", err)
|
||||
func (l *ListenDefinitions) generateTopics() []string {
|
||||
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
|
||||
scope := "suse"
|
||||
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
|
||||
scope = "opensuse"
|
||||
}
|
||||
|
||||
log.Println("RabbitMQ connection:", *server)
|
||||
topics := make([]string, 0, len(listenDefs.Handlers)*len(orgs))
|
||||
log.Println(len(listenDefs.Handlers), len(orgs))
|
||||
|
||||
server.User = url.UserPassword(rabbitUser, rabbitPassword)
|
||||
|
||||
domain := "suse"
|
||||
if server.Hostname() == "rabbit.opensuse.org" {
|
||||
domain = "opensuse"
|
||||
}
|
||||
|
||||
for _, org := range orgs {
|
||||
for k := range listenDefs.Handlers {
|
||||
topics = append(topics, fmt.Sprintf("%s.gitea.%s.%s.#", domain, org, k))
|
||||
for _, org := range l.Orgs {
|
||||
for requestType := range l.Handlers {
|
||||
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
|
||||
}
|
||||
}
|
||||
|
||||
ch := connectToRabbitMQ(log.Default(), *server, topics)
|
||||
slices.Sort(topics)
|
||||
return slices.Compact(topics)
|
||||
}
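As a concrete illustration (hypothetical org and handler names): with Orgs = ["someOrg"], a handler registered for "pull_request", and a RabbitURL pointing at rabbit.opensuse.org, generateTopics yields a single routing key. A standalone sketch of the same formatting:

// Standalone sketch of the topic format used above; names are made up.
package main

import "fmt"

func main() {
	scope := "opensuse" // chosen because the hostname is rabbit.opensuse.org; otherwise "suse"
	org := "someOrg"
	requestType := "pull_request"
	fmt.Printf("%s.src.%s.%s.#\n", scope, org, requestType)
	// Output: opensuse.src.someOrg.pull_request.#
}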
|
||||
|
||||
func (l *ListenDefinitions) UpdateTopics() {
|
||||
newTopics := l.generateTopics()
|
||||
|
||||
j := 0
|
||||
next_new_topic:
|
||||
for i := 0; i < len(newTopics); i++ {
|
||||
topic := newTopics[i]
|
||||
|
||||
for j < len(l.topics) {
|
||||
cmp := strings.Compare(topic, l.topics[j])
|
||||
|
||||
if cmp == 0 {
|
||||
j++
|
||||
continue next_new_topic
|
||||
}
|
||||
|
||||
if cmp < 0 {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
break
|
||||
}
|
||||
|
||||
l.topicSubChanges <- "-" + l.topics[j]
|
||||
j++
|
||||
}
|
||||
|
||||
if j == len(l.topics) {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
}
|
||||
}
|
||||
}
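For illustration (hypothetical topic names): with previously bound topics for orgA and orgB and a newly generated list for orgB and orgC, the loop above sends "-...orgA..." then "+...orgC..." on topicSubChanges and leaves the shared topic alone. A standalone sketch of the same kind of sorted-list delta, which additionally drains any trailing old topics at the end:

// Standalone sketch; topic names are made up. Computes subscribe ("+") and
// unsubscribe ("-") deltas between two sorted topic lists.
package main

import (
	"fmt"
	"strings"
)

func main() {
	oldTopics := []string{"opensuse.src.orgA.push.#", "opensuse.src.orgB.push.#"}
	newTopics := []string{"opensuse.src.orgB.push.#", "opensuse.src.orgC.push.#"}

	i, j := 0, 0
	for i < len(newTopics) && j < len(oldTopics) {
		switch cmp := strings.Compare(newTopics[i], oldTopics[j]); {
		case cmp == 0:
			i, j = i+1, j+1
		case cmp < 0:
			fmt.Println("+" + newTopics[i]) // subscribe
			i++
		default:
			fmt.Println("-" + oldTopics[j]) // unsubscribe
			j++
		}
	}
	for ; i < len(newTopics); i++ {
		fmt.Println("+" + newTopics[i])
	}
	for ; j < len(oldTopics); j++ {
		fmt.Println("-" + oldTopics[j])
	}
	// Output:
	// -opensuse.src.orgA.push.#
	// +opensuse.src.orgC.push.#
}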
|
||||
|
||||
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
|
||||
log.Println("RabbitMQ connection:", l.RabbitURL.String())
|
||||
log.Println(len(l.Handlers), len(l.Orgs))
|
||||
|
||||
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
|
||||
l.topics = l.generateTopics()
|
||||
ch := l.connectToRabbitMQ(log.Default())
|
||||
|
||||
for {
|
||||
msg, ok := <-ch
|
||||
@@ -223,27 +288,27 @@ func ProcessRabbitMQEvents(listenDefs ListenDefinitions, orgs []string) error {
|
||||
reqType := route[3]
|
||||
org := route[2]
|
||||
|
||||
if !slices.Contains(orgs, org) {
|
||||
log.Println("Got even for unhandeled org:", org)
|
||||
if !slices.Contains(l.Orgs, org) {
|
||||
log.Println("Got event for unhandeled org:", org)
|
||||
continue
|
||||
}
|
||||
|
||||
log.Println("org:", org, "type:", reqType)
|
||||
if handler, found := listenDefs.Handlers[reqType]; found {
|
||||
log.Println("handler found", handler)
|
||||
h, err := CreateRequestHandler()
|
||||
if err != nil {
|
||||
log.Printf("Cannot create request handler: %v\n", err)
|
||||
continue
|
||||
}
|
||||
if handler, found := l.Handlers[reqType]; found {
|
||||
/* h, err := CreateRequestHandler()
|
||||
if err != nil {
|
||||
log.Println("Cannot create request handler", err)
|
||||
continue
|
||||
}
|
||||
*/
|
||||
req, err := ParseRequestJSON(reqType, msg.Body)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing request JSON: %v\n", err)
|
||||
log.Println("Error parsing request JSON:", err)
|
||||
continue
|
||||
} else {
|
||||
log.Println("processing req", req.Type)
|
||||
h.Request = req
|
||||
ProcessEvent(handler, h)
|
||||
// h.Request = req
|
||||
ProcessEvent(handler, req)
|
||||
|
||||
}
|
||||
}
|
||||
|
48
bots-common/listen_test.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestListenDefinitionsTopicUpdate(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
handlers []string
|
||||
orgs1, orgs2 []string
|
||||
|
||||
topicDelta []string
|
||||
}{
|
||||
{
|
||||
name: "no handlers, no orgs",
|
||||
},
|
||||
{
|
||||
name: "adding one org",
|
||||
handlers: []string{"foo"},
|
||||
orgs2: []string{"newOrg"},
|
||||
topicDelta: []string{"+suse"},
|
||||
},
|
||||
}
|
||||
|
||||
u, _ := url.Parse("amqps://rabbit.example.com")
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
l := ListenDefinitions{
|
||||
Orgs: test.orgs1,
|
||||
Handlers: make(map[string]RequestProcessor),
|
||||
topicSubChanges: make(chan string, len(test.topicDelta)*10),
|
||||
RabbitURL: u,
|
||||
}
|
||||
|
||||
for _, r := range test.handlers {
|
||||
l.Handlers[r] = nil
|
||||
}
|
||||
|
||||
l.UpdateTopics()
|
||||
if len(l.topicSubChanges) != len(test.topicDelta) {
|
||||
t.Fatal("topicSubChanges != topicDelta")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -33,3 +33,9 @@ func CreateStdoutLogger(stdout, stderr io.Writer) (*log.Logger, *log.Logger) {
|
||||
errLogger := log.New(stderr, idStr, log.Lmsgprefix)
|
||||
return stdLogger, errLogger
|
||||
}
|
||||
|
||||
func PanicOnError(err error) {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
@@ -1,4 +1,4 @@
|
||||
package common
|
||||
package common_test
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
@@ -22,6 +22,8 @@ import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type TestWriter struct {
|
||||
@@ -35,7 +37,7 @@ func TestLogging(t *testing.T) {
|
||||
strWriter = bytes.NewBuffer(make([]byte, 0, 10000))
|
||||
errWriter = bytes.NewBuffer(make([]byte, 0, 10000))
|
||||
|
||||
stdLogger, errLogger := CreateStdoutLogger(strWriter, errWriter)
|
||||
stdLogger, errLogger := common.CreateStdoutLogger(strWriter, errWriter)
|
||||
errLogger.Printf("%d\n", 100)
|
||||
stdLogger.Printf("OKA %d Done\n", 77)
|
||||
stdLogger.Println("Another line")
|
||||
|
200
bots-common/maintainership.go
Normal file
@@ -0,0 +1,200 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"slices"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=maintainership.go -destination=mock/maintainership.go -typed
|
||||
|
||||
type MaintainershipData interface {
|
||||
ListProjectMaintainers() []string
|
||||
ListPackageMaintainers(pkg string) []string
|
||||
|
||||
IsApproved(pkg string, reviews []*models.PullReview) bool
|
||||
}
|
||||
|
||||
const ProjectKey = ""
|
||||
const ProjectFileKey = "_project"
|
||||
|
||||
type MaintainershipMap struct {
|
||||
Data map[string][]string
|
||||
IsDir bool
|
||||
FetchPackage func(string) ([]byte, error)
|
||||
}
|
||||
|
||||
func parseMaintainershipData(data []byte) (*MaintainershipMap, error) {
|
||||
maintainers := &MaintainershipMap{
|
||||
Data: make(map[string][]string),
|
||||
}
|
||||
if err := json.Unmarshal(data, &maintainers.Data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return maintainers, nil
|
||||
}
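For illustration (package and user names are hypothetical): the maintainership JSON maps package names to maintainer logins, with the empty-string key holding project-level maintainers. A standalone sketch of parsing that shape with encoding/json, mirroring what parseMaintainershipData does:

// Standalone sketch; data values are made up.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"": ["projMaintainer"], "somePkg": ["pkgMaintainer"]}`)
	data := make(map[string][]string)
	if err := json.Unmarshal(raw, &data); err != nil {
		panic(err)
	}
	fmt.Println(data[""], data["somePkg"])
	// Output: [projMaintainer] [pkgMaintainer]
}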
|
||||
|
||||
func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit, branch string) (*MaintainershipMap, error) {
|
||||
data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, ProjectFileKey)
|
||||
dir := true
|
||||
if err != nil || data == nil {
|
||||
dir = false
|
||||
if _, notFound := err.(*repository.RepoGetRawFileNotFound); !notFound {
|
||||
return nil, err
|
||||
}
|
||||
data, _, err = gitea.FetchMaintainershipFile(org, prjGit, branch)
|
||||
if err != nil || data == nil {
|
||||
if _, notFound := err.(*repository.RepoGetRawFileNotFound); !notFound {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// no maintainers
|
||||
data = []byte("{}")
|
||||
}
|
||||
}
|
||||
|
||||
m, err := parseMaintainershipData(data)
|
||||
if m != nil {
|
||||
m.IsDir = dir
|
||||
m.FetchPackage = func(pkg string) ([]byte, error) {
|
||||
data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, pkg)
|
||||
return data, err
|
||||
}
|
||||
}
|
||||
return m, err
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) ListProjectMaintainers() []string {
|
||||
if data == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
m, found := data.Data[ProjectKey]
|
||||
if !found {
|
||||
return nil
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
func parsePkgDirData(pkg string, data []byte) []string {
|
||||
m := make(map[string][]string)
|
||||
if err := json.Unmarshal(data, &m); err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
pkgMaintainers, found := m[pkg]
|
||||
if !found {
|
||||
return nil
|
||||
}
|
||||
return pkgMaintainers
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) ListPackageMaintainers(pkg string) []string {
|
||||
if data == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
pkgMaintainers, found := data.Data[pkg]
|
||||
if !found && data.IsDir {
|
||||
pkgData, err := data.FetchPackage(pkg)
|
||||
if err == nil {
|
||||
pkgMaintainers = parsePkgDirData(pkg, pkgData)
|
||||
if len(pkgMaintainers) > 0 {
|
||||
data.Data[pkg] = pkgMaintainers
|
||||
}
|
||||
}
|
||||
}
|
||||
prjMaintainers := data.ListProjectMaintainers()
|
||||
|
||||
prjMaintainer:
|
||||
for _, prjm := range prjMaintainers {
|
||||
for i := range pkgMaintainers {
|
||||
if pkgMaintainers[i] == prjm {
|
||||
continue prjMaintainer
|
||||
}
|
||||
}
|
||||
pkgMaintainers = append(pkgMaintainers, prjm)
|
||||
}
|
||||
|
||||
return pkgMaintainers
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview) bool {
|
||||
reviewers, found := data.Data[pkg]
|
||||
if !found {
|
||||
if pkg != ProjectKey && data.IsDir {
|
||||
r, err := data.FetchPackage(pkg)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
reviewers = parsePkgDirData(pkg, r)
|
||||
data.Data[pkg] = reviewers
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
if len(reviewers) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, review := range reviews {
|
||||
if !review.Stale && review.State == ReviewStateApproved && slices.Contains(reviewers, review.User.UserName) {
|
||||
return true
|
||||
}
|
||||
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) WriteMaintainershipFile(writer io.StringWriter) error {
|
||||
if data.IsDir {
|
||||
return fmt.Errorf("Not implemented")
|
||||
}
|
||||
|
||||
writer.WriteString("{\n")
|
||||
|
||||
if d, ok := data.Data[""]; ok {
|
||||
eol := ","
|
||||
if len(data.Data) == 1 {
|
||||
eol = ""
|
||||
}
|
||||
slices.Sort(d)
|
||||
str, _ := json.Marshal(d)
|
||||
writer.WriteString(fmt.Sprintf(" \"\": %s%s\n", string(str), eol))
|
||||
}
|
||||
|
||||
keys := make([]string, len(data.Data))
|
||||
i := 0
|
||||
for pkg := range data.Data {
|
||||
if pkg == "" {
|
||||
continue
|
||||
}
|
||||
keys[i] = pkg
|
||||
i++
|
||||
}
|
||||
if len(keys) >= i {
|
||||
keys = slices.Delete(keys, i, len(keys))
|
||||
}
|
||||
slices.Sort(keys)
|
||||
for i, pkg := range(keys) {
|
||||
eol := ","
|
||||
if i == len(keys)-1 {
|
||||
eol = ""
|
||||
}
|
||||
maintainers := data.Data[pkg]
|
||||
slices.Sort(maintainers)
|
||||
pkgStr, _ := json.Marshal(pkg)
|
||||
maintainersStr, _ := json.Marshal(maintainers)
|
||||
writer.WriteString(fmt.Sprintf(" %s: %s%s\n", pkgStr, maintainersStr, eol))
|
||||
}
|
||||
|
||||
writer.WriteString("}\n")
|
||||
return nil
|
||||
}
|
237
bots-common/maintainership_test.go
Normal file
@@ -0,0 +1,237 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestMaintainership(t *testing.T) {
|
||||
config := common.AutogitConfig{
|
||||
Branch: "bar",
|
||||
Organization: "foo",
|
||||
GitProjectName: common.DefaultGitPrj,
|
||||
}
|
||||
|
||||
packageTests := []struct {
|
||||
name string
|
||||
maintainers []string
|
||||
otherError bool
|
||||
packageName string
|
||||
|
||||
maintainersFile []byte
|
||||
maintainersFileErr error
|
||||
|
||||
maintainersDir map[string][]byte
|
||||
}{
|
||||
/* PACKAGE MAINTAINERS */
|
||||
// package tests have packageName, projects do not
|
||||
{
|
||||
name: "No maintainer in empty package",
|
||||
packageName: "foo",
|
||||
},
|
||||
{
|
||||
name: "Error in MaintainerListForPackage when remote has an error",
|
||||
maintainersFileErr: errors.New("Some error"), // repository.NewRepoGetRawFileNotFound(),
|
||||
packageName: "foo",
|
||||
},
|
||||
{
|
||||
name: "Multiple package maintainers",
|
||||
maintainersFile: []byte(`{"pkg": ["user1", "user2"], "": ["user1", "user3"]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user1", "user3"]}`),
|
||||
"pkg": []byte(`{"pkg": ["user1", "user2"]}`),
|
||||
},
|
||||
maintainers: []string{"user1", "user2", "user3"},
|
||||
packageName: "pkg",
|
||||
},
|
||||
{
|
||||
name: "No package maintainers and only project maintainer",
|
||||
maintainersFile: []byte(`{"pkg2": ["user1", "user2"], "": ["user1", "user3"]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user1", "user3"]}`),
|
||||
},
|
||||
maintainers: []string{"user1", "user3"},
|
||||
packageName: "pkg",
|
||||
},
|
||||
{
|
||||
name: "Invalid list of package maintainers",
|
||||
maintainersFile: []byte(`{"pkg": 3,"": ["user", 4]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user1", 4]}`),
|
||||
"pkg": []byte(`"pkg": 3`),
|
||||
},
|
||||
otherError: true,
|
||||
packageName: "pkg",
|
||||
},
|
||||
|
||||
/* PROJECT MAINTAINERS */
|
||||
{
|
||||
name: "No maintainer for empty project",
|
||||
},
|
||||
{
|
||||
name: "No maintainer for empty project maintainer file",
|
||||
maintainersFile: []byte("{}"),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{}`),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Error in MaintainerListForProject when remote has an error",
|
||||
maintainersFileErr: errors.New("some error"), //repository.NewRepoGetRawFileNotFound(),
|
||||
},
|
||||
{
|
||||
name: "Multiple project maintainers",
|
||||
maintainersFile: []byte(`{"": ["user1", "user2"]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user1", "user2"]}`),
|
||||
},
|
||||
maintainers: []string{"user1", "user2"},
|
||||
},
|
||||
{
|
||||
name: "Single project maintainer",
|
||||
maintainersFile: []byte(`{"": ["user"]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user"]}`),
|
||||
},
|
||||
maintainers: []string{"user"},
|
||||
},
|
||||
{
|
||||
name: "Invalid list of project maintainers",
|
||||
maintainersFile: []byte(`{"": ["user", 4]}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": ["user", 4]}`),
|
||||
},
|
||||
otherError: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid list of project maintainers",
|
||||
maintainersFile: []byte(`{"": 4}`),
|
||||
maintainersDir: map[string][]byte{
|
||||
"_project": []byte(`{"": 4}`),
|
||||
},
|
||||
otherError: true,
|
||||
},
|
||||
}
|
||||
|
||||
notFoundError := repository.NewRepoGetRawFileNotFound()
|
||||
for _, test := range packageTests {
|
||||
runTests := func(t *testing.T, mi common.GiteaMaintainershipReader) {
|
||||
maintainers, err := common.FetchProjectMaintainershipData(mi, config.Organization, config.GitProjectName, config.Branch)
|
||||
if err != nil && !test.otherError {
|
||||
if test.maintainersFileErr == nil {
|
||||
t.Fatal("Unexpected error recieved", err)
|
||||
} else if err != test.maintainersFileErr {
|
||||
t.Error("Wrong error recieved", err)
|
||||
}
|
||||
} else if test.maintainersFileErr != nil {
|
||||
t.Fatal("Expected an error...")
|
||||
} else if test.otherError && err == nil {
|
||||
t.Fatal("Expected an error...")
|
||||
}
|
||||
|
||||
var m []string
|
||||
if len(test.packageName) > 0 {
|
||||
m = maintainers.ListPackageMaintainers(test.packageName)
|
||||
} else {
|
||||
m = maintainers.ListProjectMaintainers()
|
||||
}
|
||||
|
||||
if len(m) != len(test.maintainers) {
|
||||
t.Error("Invalid number of maintainers for package", test.packageName, len(m), "vs", len(test.maintainers))
|
||||
}
|
||||
for i := range m {
|
||||
if !slices.Contains(test.maintainers, m[i]) {
|
||||
t.Fatal("Can't find expected users. Found:", m)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
t.Run(test.name+"_File", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mi := mock_common.NewMockGiteaMaintainershipReader(ctl)
|
||||
|
||||
// tests with maintainership file
|
||||
mi.EXPECT().FetchMaintainershipFile("foo", common.DefaultGitPrj, "bar").
|
||||
Return(test.maintainersFile, "", test.maintainersFileErr)
|
||||
mi.EXPECT().FetchMaintainershipDirFile("foo", common.DefaultGitPrj, "bar", common.ProjectFileKey).
|
||||
Return(nil, "", notFoundError)
|
||||
|
||||
runTests(t, mi)
|
||||
})
|
||||
|
||||
t.Run(test.name+"_Dir", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mi := mock_common.NewMockGiteaMaintainershipReader(ctl)
|
||||
|
||||
// run same tests with directory maintainership data
|
||||
for filename, data := range test.maintainersDir {
|
||||
mi.EXPECT().FetchMaintainershipDirFile("foo", common.DefaultGitPrj, "bar", filename).Return(data, "", test.maintainersFileErr).AnyTimes()
|
||||
}
|
||||
if _, found := test.maintainersDir[common.ProjectFileKey]; !found {
|
||||
mi.EXPECT().FetchMaintainershipDirFile("foo", common.DefaultGitPrj, "bar", common.ProjectFileKey).Return(nil, "", test.maintainersFileErr).AnyTimes()
|
||||
mi.EXPECT().FetchMaintainershipFile("foo", common.DefaultGitPrj, "bar").Return(nil, "", test.maintainersFileErr).AnyTimes()
|
||||
}
|
||||
mi.EXPECT().FetchMaintainershipDirFile("foo", common.DefaultGitPrj, "bar", gomock.Any()).Return(nil, "", notFoundError).AnyTimes()
|
||||
|
||||
runTests(t, mi)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMaintainershipFileWrite(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
is_dir bool
|
||||
maintainers map[string][]string
|
||||
expected_output string
|
||||
expected_error error
|
||||
}{
|
||||
{
|
||||
name: "empty dataset",
|
||||
expected_output: "{\n}\n",
|
||||
},
|
||||
{
|
||||
name: "2 project maintainers only",
|
||||
maintainers: map[string][]string{
|
||||
"": {"two", "one"},
|
||||
},
|
||||
expected_output: "{\n \"\": [\"one\",\"two\"]\n}\n",
|
||||
},
|
||||
{
|
||||
name: "2 project maintainers and 2 single package maintainers",
|
||||
maintainers: map[string][]string{
|
||||
"": {"two", "one"},
|
||||
"pkg1": {},
|
||||
"foo": {"four", "byte"},
|
||||
},
|
||||
expected_output: "{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\"],\n \"pkg1\": []\n}\n",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
b := bytes.Buffer{}
|
||||
data := common.MaintainershipMap{
|
||||
Data: test.maintainers,
|
||||
IsDir: test.is_dir,
|
||||
}
|
||||
|
||||
if err := data.WriteMaintainershipFile(&b); err != test.expected_error {
|
||||
t.Fatal("unexpected error:", err, "Expecting:", test.expected_error)
|
||||
}
|
||||
|
||||
output := b.String()
|
||||
|
||||
if test.expected_output != output {
|
||||
t.Fatal("unexpected output:", output, "Expecting:", test.expected_output)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -61,13 +61,29 @@ type RepositoryPathMeta struct {
|
||||
|
||||
type RepositoryMeta struct {
|
||||
Name string `xml:"name,attr"`
|
||||
BuildTrigger string `xml:"rebuild,attr"`
|
||||
BlockMode string `xml:"block"`
|
||||
LinkedBuild string `xml:"linkedbuild"`
|
||||
BuildTrigger string `xml:"rebuild,attr,omitempty"`
|
||||
BlockMode string `xml:"block,attr,omitempty"`
|
||||
LinkedBuild string `xml:"linkedbuild,attr,omitempty"`
|
||||
Archs []string `xml:"arch"`
|
||||
Paths []RepositoryPathMeta `xml:"path"`
|
||||
}
|
||||
|
||||
type PersonRepoMeta struct {
|
||||
XMLName xml.Name `xml:"person"`
|
||||
UserID string `xml:"userid,attr"`
|
||||
Role string `xml:"role,attr,omitempty"`
|
||||
}
|
||||
|
||||
type PersonGroup struct {
|
||||
XMLName xml.Name `xml:"person"`
|
||||
Persons []PersonRepoMeta `xml:"person"`
|
||||
}
|
||||
|
||||
type GroupRepoMeta struct {
|
||||
GroupID string `xml:"groupid,attr"`
|
||||
Role string `xml:"role,attr"`
|
||||
}
|
||||
|
||||
type Flags struct {
|
||||
Contents string `xml:",innerxml"`
|
||||
}
|
||||
@@ -76,9 +92,11 @@ type ProjectMeta struct {
|
||||
XMLName xml.Name `xml:"project"`
|
||||
Name string `xml:"name,attr"`
|
||||
Title string `xml:"title"`
|
||||
Description string `xml:"description"`
|
||||
Url string `xml:"url"`
|
||||
Description string `xml:"description,omitempty"`
|
||||
Url string `xml:"url,omitempty"`
|
||||
ScmSync string `xml:"scmsync"`
|
||||
Persons []PersonRepoMeta `xml:"person"`
|
||||
Groups []GroupRepoMeta `xml:"group"`
|
||||
Repositories []RepositoryMeta `xml:"repository"`
|
||||
|
||||
BuildFlags Flags `xml:"build"`
|
||||
@@ -87,6 +105,29 @@ type ProjectMeta struct {
|
||||
UseForBuild Flags `xml:"useforbuild"`
|
||||
}
|
||||
|
||||
type PackageMeta struct {
|
||||
XMLName xml.Name `xml:"package"`
|
||||
Name string `xml:"name,attr"`
|
||||
Project string `xml:"project,attr"`
|
||||
ScmSync string `xml:"scmsync"`
|
||||
Persons []PersonRepoMeta `xml:"person"`
|
||||
Groups []GroupRepoMeta `xml:"group"`
|
||||
}
|
||||
|
||||
type UserMeta struct {
|
||||
XMLName xml.Name `xml:"person"`
|
||||
Login string `xml:"login"`
|
||||
Email string `xml:"email"`
|
||||
Name string `xml:"realname"`
|
||||
State string `xml:"state"`
|
||||
}
|
||||
|
||||
type GroupMeta struct {
|
||||
XMLName xml.Name `xml:"group"`
|
||||
Title string `xml:"title"`
|
||||
Persons PersonGroup `xml:"person"`
|
||||
}
|
||||
|
||||
func parseProjectMeta(data []byte) (*ProjectMeta, error) {
|
||||
var meta ProjectMeta
|
||||
err := xml.Unmarshal(data, &meta)
|
||||
@@ -97,14 +138,82 @@ func parseProjectMeta(data []byte) (*ProjectMeta, error) {
|
||||
return &meta, nil
|
||||
}
|
||||
|
||||
func (c *ObsClient) GetGroupMeta(gid string) (*GroupMeta, error) {
|
||||
req, err := http.NewRequest("GET", c.baseUrl.JoinPath("group", gid).String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetBasicAuth(c.user, c.password)
|
||||
res, err := c.client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch res.StatusCode {
|
||||
case 200:
|
||||
break
|
||||
case 404:
|
||||
return nil, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("Unexpected return code: %d", res.StatusCode)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var meta GroupMeta
|
||||
err = xml.Unmarshal(data, &meta)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &meta, nil
|
||||
}
|
||||
|
||||
func (c *ObsClient) GetUserMeta(uid string) (*UserMeta, error) {
|
||||
req, err := http.NewRequest("GET", c.baseUrl.JoinPath("person", uid).String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetBasicAuth(c.user, c.password)
|
||||
res, err := c.client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch res.StatusCode {
|
||||
case 200:
|
||||
break
|
||||
case 404:
|
||||
return nil, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("Unexpected return code: %d", res.StatusCode)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var meta UserMeta
|
||||
err = xml.Unmarshal(data, &meta)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &meta, nil
|
||||
}
|
||||
|
||||
func (c *ObsClient) GetProjectMeta(project string) (*ProjectMeta, error) {
|
||||
req, err := http.NewRequest("GET", c.baseUrl.JoinPath("source", project, "_meta").String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetBasicAuth(c.user, c.password)
|
||||
log.Printf("request: %#v", *req.URL)
|
||||
log.Printf("headers: %#v", req.Header)
|
||||
res, err := c.client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
@@ -128,6 +237,41 @@ func (c *ObsClient) GetProjectMeta(project string) (*ProjectMeta, error) {
|
||||
return parseProjectMeta(data)
|
||||
}
|
||||
|
||||
func (c *ObsClient) GetPackageMeta(project, pkg string) (*PackageMeta, error) {
|
||||
req, err := http.NewRequest("GET", c.baseUrl.JoinPath("source", project, pkg, "_meta").String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetBasicAuth(c.user, c.password)
|
||||
res, err := c.client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch res.StatusCode {
|
||||
case 200:
|
||||
break
|
||||
case 404:
|
||||
return nil, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("Unexpected return code: %d", res.StatusCode)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var meta PackageMeta
|
||||
err = xml.Unmarshal(data, &meta)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &meta, nil
|
||||
}
|
||||
|
||||
func ObsSafeProjectName(prjname string) string {
|
||||
if len(prjname) < 1 {
|
||||
return prjname
|
||||
@@ -149,9 +293,9 @@ func ObsSafeProjectName(prjname string) string {
|
||||
return prjname
|
||||
}
|
||||
|
||||
var ValidBlockModes []string
|
||||
var ValidPrjLinkModes []string
|
||||
var ValidTriggerModes []string
|
||||
var ValidBlockModes []string = []string{"all", "local", "never"}
|
||||
var ValidPrjLinkModes []string = []string{"off", "localdep", "alldirect", "all"}
|
||||
var ValidTriggerModes []string = []string{"transitive", "direct", "local"}
|
||||
|
||||
func (c *ObsClient) SetProjectMeta(meta *ProjectMeta) error {
|
||||
|
||||
@@ -177,7 +321,6 @@ func (c *ObsClient) SetProjectMeta(meta *ProjectMeta) error {
|
||||
return err
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewReader(xml))
|
||||
log.Printf("headers: %#v", req.Header)
|
||||
log.Printf("xml: %s", xml)
|
||||
res, err := c.client.Do(req)
|
||||
|
||||
@@ -303,9 +446,6 @@ func (r *BuildResultList) BuildResultSummary() (success, finished bool) {
|
||||
return
|
||||
}
|
||||
|
||||
var ObsBuildStatusDetails map[string]ObsBuildStatusDetail
|
||||
var ObsRepoStatusDetails map[string]ObsBuildStatusDetail
|
||||
|
||||
type ObsBuildStatusDetail struct {
|
||||
Code string
|
||||
Description string
|
||||
@@ -313,134 +453,127 @@ type ObsBuildStatusDetail struct {
|
||||
Success bool
|
||||
}
|
||||
|
||||
func init() {
|
||||
ValidTriggerModes = []string{"transitive", "direct", "local"}
|
||||
ValidBlockModes = []string{"all", "local", "never"}
|
||||
ValidPrjLinkModes = []string{"off", "localdep", "alldirect", "all"}
|
||||
|
||||
ObsBuildStatusDetails = make(map[string]ObsBuildStatusDetail)
|
||||
ObsRepoStatusDetails = make(map[string]ObsBuildStatusDetail)
|
||||
|
||||
// package status
|
||||
ObsBuildStatusDetails["succeeded"] = ObsBuildStatusDetail{
|
||||
var ObsBuildStatusDetails map[string]ObsBuildStatusDetail = map[string]ObsBuildStatusDetail{
|
||||
"succeeded": ObsBuildStatusDetail{
|
||||
Code: "succeeded",
|
||||
Description: "Package has built successfully and can be used to build further packages.",
|
||||
Finished: true,
|
||||
Success: true,
|
||||
}
|
||||
ObsBuildStatusDetails["failed"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"failed": ObsBuildStatusDetail{
|
||||
Code: "failed",
|
||||
Description: "The package does not build successfully. No packages have been created. Packages that depend on this package will be built using any previously created packages, if they exist.",
|
||||
Finished: true,
|
||||
Success: false,
|
||||
}
|
||||
ObsBuildStatusDetails["unresolvable"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"unresolvable": ObsBuildStatusDetail{
|
||||
Code: "unresolvable",
|
||||
Description: "The build can not begin, because required packages are either missing or not explicitly defined.",
|
||||
Finished: true,
|
||||
Success: false,
|
||||
}
|
||||
ObsBuildStatusDetails["broken"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"broken": ObsBuildStatusDetail{
|
||||
Code: "broken",
|
||||
Description: "The sources either contain no build description (e.g. specfile), automatic source processing failed or a merge conflict does exist.",
|
||||
Finished: true,
|
||||
Success: false,
|
||||
}
|
||||
ObsBuildStatusDetails["blocked"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"blocked": ObsBuildStatusDetail{
|
||||
Code: "blocked",
|
||||
Description: "This package waits for other packages to be built. These can be in the same or other projects.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["scheduled"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"scheduled": ObsBuildStatusDetail{
|
||||
Code: "scheduled",
|
||||
Description: "A package has been marked for building, but the build has not started yet.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["dispatching"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"dispatching": ObsBuildStatusDetail{
|
||||
Code: "dispatching",
|
||||
Description: "A package is being copied to a build host. This is an intermediate state before building.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["building"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"building": ObsBuildStatusDetail{
|
||||
Code: "building",
|
||||
Description: "The package is currently being built.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["signing"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"signing": ObsBuildStatusDetail{
|
||||
Code: "signing",
|
||||
Description: "The package has been built successfully and is assigned to get signed.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["finished"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"finished": ObsBuildStatusDetail{
|
||||
Code: "finished",
|
||||
Description: "The package has been built and signed, but has not yet been picked up by the scheduler. This is an intermediate state prior to 'succeeded' or 'failed'.",
|
||||
Finished: false,
|
||||
}
|
||||
ObsBuildStatusDetails["disabled"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"disabled": ObsBuildStatusDetail{
|
||||
Code: "disabled",
|
||||
Description: "The package has been disabled from building in project or package metadata. Packages that depend on this package will be built using any previously created packages, if they still exist.",
|
||||
Finished: true,
|
||||
Success: true,
|
||||
}
|
||||
ObsBuildStatusDetails["excluded"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"excluded": ObsBuildStatusDetail{
|
||||
Code: "excluded",
|
||||
Description: "The package build has been disabled in package build description (for example in the .spec file) or does not provide a matching build description for the target.",
|
||||
Finished: true,
|
||||
Success: true,
|
||||
}
|
||||
ObsBuildStatusDetails["locked"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"locked": ObsBuildStatusDetail{
|
||||
Code: "locked",
|
||||
Description: "The package is frozen",
|
||||
Finished: true,
|
||||
Success: true,
|
||||
}
|
||||
ObsBuildStatusDetails["unknown"] = ObsBuildStatusDetail{
|
||||
},
|
||||
"unknown": ObsBuildStatusDetail{
|
||||
Code: "unknown",
|
||||
Description: "The scheduler has not yet evaluated this package. Should be a short intermediate state for new packages.",
|
||||
Finished: false,
|
||||
}
|
||||
|
||||
},
|
||||
}
|
||||
var ObsRepoStatusDetails map[string]ObsBuildStatusDetail = map[string]ObsBuildStatusDetail{
	// repo status
	"published": ObsBuildStatusDetail{
		Code:        "published",
		Description: "Repository has been published",
		Finished:    true,
	},
	"publishing": ObsBuildStatusDetail{
		Code:        "publishing",
		Description: "Repository is being created right now",
		Finished:    true,
	},
	"unpublished": ObsBuildStatusDetail{
		Code:        "unpublished",
		Description: "Build finished, but repository publishing is disabled",
		Finished:    true,
	},
	"building": ObsBuildStatusDetail{
		Code:        "building",
		Description: "Build jobs exist for the repository",
		Finished:    false,
	},
	"finished": ObsBuildStatusDetail{
		Code:        "finished",
		Description: "Build jobs have been processed, new repository is not yet created",
		Finished:    true,
	},
	"blocked": ObsBuildStatusDetail{
		Code:        "blocked",
		Description: "No build possible at the moment, waiting for jobs in other repositories",
		Finished:    false,
	},
	"broken": ObsBuildStatusDetail{
		Code:        "broken",
		Description: "The repository setup is broken, build or publish not possible",
		Finished:    true,
	},
	"scheduling": ObsBuildStatusDetail{
		Code:        "scheduling",
		Description: "The repository state is being calculated right now",
		Finished:    false,
	},
}

func parseBuildResults(data []byte) (*BuildResultList, error) {

@@ -461,6 +594,24 @@ func (obs ObsProjectNotFound) Error() string {
	return fmt.Sprintf("OBS project is not found: %s", obs.Project)
}

func (c *ObsClient) ProjectConfig(project string) (string, error) {
	u := c.baseUrl.JoinPath("source", project, "_config")

	req, err := http.NewRequest("GET", u.String(), nil)
	if err != nil {
		return "", err
	}
	req.SetBasicAuth(c.user, c.password)
	res, err := c.client.Do(req)

	if err != nil {
		return "", err
	}

	if data, err := io.ReadAll(res.Body); err == nil {
		return string(data), nil
	} else {
		return "", err
	}
}

func (c *ObsClient) BuildStatus(project string, packages ...string) (*BuildResultList, error) {
	u := c.baseUrl.JoinPath("build", project, "_result")
	query := u.Query()
333	bots-common/pr.go	Normal file
@@ -0,0 +1,333 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type PRInfo struct {
|
||||
PR *models.PullRequest
|
||||
Reviews *PRReviews
|
||||
}
|
||||
|
||||
type PRSet struct {
|
||||
PRs []PRInfo
|
||||
Config *AutogitConfig
|
||||
}
|
||||
|
||||
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []PRInfo, config *AutogitConfig) ([]PRInfo, error) {
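	// collects the given PR and, when it is the PrjGit PR, recursively every PR
	// referenced in its description; entries already present in currentSet are
	// skipped so that cross-referencing PRs cannot cause an infinite loop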
|
||||
for _, p := range currentSet {
|
||||
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
retSet := []PRInfo{PRInfo{PR: pr}}
|
||||
|
||||
// referenced PRs only need to be extracted from the PrjGit PR description
|
||||
if pr.Base.Repo.Name == config.GitProjectName && pr.Base.Repo.Owner.UserName == config.Organization {
|
||||
_, refPRs := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
|
||||
for _, prdata := range refPRs {
|
||||
pr, err := gitea.GetPullRequest(prdata.Org, prdata.Repo, prdata.Num)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data, err := readPRData(gitea, pr, slices.Concat(currentSet, retSet), config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
retSet = slices.Concat(retSet, data)
|
||||
}
|
||||
}
|
||||
|
||||
return retSet, nil
|
||||
}
|
||||
|
||||
func FetchPRSet(gitea GiteaPRFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
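	// resolve the starting PR: for a package PR, first look for an associated
	// PrjGit PR and fall back to the package PR itself; for a PrjGit PR, fetch
	// it directly, then expand the full review set via readPRData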
|
||||
var pr *models.PullRequest
|
||||
var err error
|
||||
|
||||
if org != config.Organization || repo != config.GitProjectName {
|
||||
if pr, err = gitea.GetAssociatedPrjGitPR(config.Organization, config.GitProjectName, org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if pr == nil {
|
||||
if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
prs, err := readPRData(gitea, pr, nil, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &PRSet{PRs: prs, Config: config}, nil
|
||||
}
|
||||
|
||||
func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
|
||||
return pr.Base.Repo.Name == rs.Config.GitProjectName && pr.Base.Repo.Owner.UserName == rs.Config.Organization
|
||||
}
|
||||
|
||||
func (rs *PRSet) GetPrjGitPR() (*models.PullRequest, error) {
|
||||
var ret *models.PullRequest
|
||||
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
if ret == nil {
|
||||
ret = prinfo.PR
|
||||
} else {
|
||||
return nil, errors.New("Multiple PrjGit PRs in one review set")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ret != nil {
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
return nil, errors.New("No PrjGit PR found")
|
||||
}
|
||||
|
||||
func (rs *PRSet) IsConsistent() bool {
|
||||
prjpr, err := rs.GetPrjGitPR()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
_, prjpr_set := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(prjpr.Body)))
|
||||
if len(prjpr_set) != len(rs.PRs)-1 { // 1 to many mapping
|
||||
return false
|
||||
}
|
||||
|
||||
next_rs:
|
||||
for _, prinfo := range rs.PRs {
|
||||
if prjpr == prinfo.PR {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, pr := range prjpr_set {
|
||||
if prinfo.PR.Base.Repo.Owner.UserName == pr.Org && prinfo.PR.Base.Repo.Name == pr.Repo && prinfo.PR.Index == pr.Num {
|
||||
continue next_rs
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintainers MaintainershipData) error {
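	// for every PR in the set: build the reviewer list (project reviewers for
	// the PrjGit PR, package reviewers plus maintainers otherwise), drop the
	// submitter and anyone with a current review or review request, then
	// request reviews from whoever remains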
|
||||
configReviewers := ParseReviewers(rs.Config.Reviewers)
|
||||
|
||||
for _, pr := range rs.PRs {
|
||||
reviewers := []string{}
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = configReviewers.Prj
|
||||
if len(rs.PRs) == 1 {
|
||||
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers())
|
||||
}
|
||||
} else {
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(), maintainers.ListPackageMaintainers(pkg))
|
||||
}
|
||||
|
||||
// submitters do not need to review their own work
|
||||
if idx := slices.Index(reviewers, pr.PR.User.UserName); idx != -1 {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
}
|
||||
|
||||
// remove reviewers that were already requested and are not stale
|
||||
reviews, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for idx := 0; idx < len(reviewers); {
|
||||
user := reviewers[idx]
|
||||
if reviews.HasPendingReviewBy(user) || reviews.IsReviewedBy(user) {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
} else {
|
||||
idx++
|
||||
}
|
||||
}
|
||||
|
||||
// get maintainers associated with the PR too
|
||||
if len(reviewers) > 0 {
|
||||
if _, err := gitea.RequestReviews(pr.PR, reviewers...); err != nil {
|
||||
return fmt.Errorf("Cannot create reviews on %s/%s#%d for [%s]: %w", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", "), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData) bool {
|
||||
configReviewers := ParseReviewers(rs.Config.Reviewers)
|
||||
|
||||
is_reviewed := false
|
||||
for _, pr := range rs.PRs {
|
||||
var reviewers []string
|
||||
var pkg string
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = configReviewers.Prj
|
||||
pkg = ""
|
||||
} else {
|
||||
reviewers = configReviewers.Pkg
|
||||
pkg = pr.PR.Base.Repo.Name
|
||||
}
|
||||
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
is_reviewed = r.IsApproved()
|
||||
if !is_reviewed {
|
||||
return false
|
||||
}
|
||||
|
||||
if is_reviewed = maintainers.IsApproved(pkg, r.reviews); !is_reviewed {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return is_reviewed
|
||||
}
|
||||
|
||||
func (rs *PRSet) Merge(author, email string) error {
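	// merges the PrjGit PR with --no-ff after verifying that its Base.Sha is
	// already part of the target branch; a conflict limited to .gitmodules is
	// resolved by regenerating the file from both sides' submodule entries,
	// then all package PRs are fast-forwarded and every repository is pushed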
|
||||
prjgit, err := rs.GetPrjGitPR()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gh := GitHandlerGeneratorImpl{}
|
||||
git, err := gh.CreateGitHandler(author, email, prjgit.Base.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
git.GitExecOrPanic("", "clone", "--depth", "1", prjgit.Base.Repo.SSHURL, DefaultGitPrj)
|
||||
git.GitExecOrPanic(DefaultGitPrj, "fetch", "origin", prjgit.Base.Sha, prjgit.Head.Sha)
|
||||
|
||||
// if other changes merged, check if we have conflicts
|
||||
rev := strings.TrimSpace(git.GitExecWithOutputOrPanic(DefaultGitPrj, "merge-base", "HEAD", prjgit.Base.Sha, prjgit.Head.Sha))
|
||||
if rev != prjgit.Base.Sha {
|
||||
return fmt.Errorf("Base.Sha (%s) not yet merged into project-git. Aborting merge.", prjgit.Base.Sha)
|
||||
}
|
||||
/*
|
||||
rev := git.GitExecWithOutputOrPanic(common.DefaultGitPrj, "rev-list", "-1", "HEAD")
|
||||
if rev != prjgit.Base.Sha {
|
||||
panic("FIXME")
|
||||
}
|
||||
*/
|
||||
msg := "merging"
|
||||
|
||||
err = git.GitExec(DefaultGitPrj, "merge", "--no-ff", "-m", msg, prjgit.Head.Sha)
|
||||
if err != nil {
|
||||
status, statusErr := git.GitStatus(DefaultGitPrj)
|
||||
if statusErr != nil {
|
||||
return fmt.Errorf("Failed to merge: %w . Status also failed: %w", err, statusErr)
|
||||
}
|
||||
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged {
|
||||
if s.Path != ".gitmodules" {
|
||||
return err
|
||||
}
|
||||
|
||||
submodules, err := git.GitSubmoduleList(DefaultGitPrj, "MERGE_HEAD")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s3, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[2])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
|
||||
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
|
||||
// merge from subs3 (target), subs1 (orig), subs2 (second base that is missing from target base)
|
||||
// this will update submodules
|
||||
mergedSubs := slices.Concat(subs1, subs2, subs3)
|
||||
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
|
||||
nextSub:
|
||||
for subName := range submodules {
|
||||
|
||||
for i := range mergedSubs {
|
||||
if path.Base(mergedSubs[i].Path) == subName {
|
||||
filteredSubs = append(filteredSubs, mergedSubs[i])
|
||||
continue nextSub
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("Cannot find submodule for path: %s", subName)
|
||||
}
|
||||
|
||||
out, err := os.Create(path.Join(git.GetPath(), DefaultGitPrj, ".gitmodules"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
|
||||
}
|
||||
if err = WriteSubmodules(filteredSubs, out); err != nil {
|
||||
return fmt.Errorf("Can't write .gitmodules: %w", err)
|
||||
}
|
||||
if err = out.Close(); err != nil {
|
||||
return fmt.Errorf("Can't close .gitmodules: %w", err)
|
||||
}
|
||||
|
||||
os.CopyFS("/tmp/test", os.DirFS(git.GetPath()))
|
||||
|
||||
git.GitExecOrPanic(DefaultGitPrj, "add", ".gitmodules")
|
||||
git.GitExecOrPanic(DefaultGitPrj, "-c", "core.editor=true", "merge", "--continue")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FF all non-prj git
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
continue
|
||||
}
|
||||
git.GitExecOrPanic("", "clone", prinfo.PR.Base.Repo.SSHURL, prinfo.PR.Base.Name)
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "fetch", "origin", prinfo.PR.Head.Sha)
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "merge", "--ff", prinfo.PR.Head.Sha)
|
||||
}
|
||||
|
||||
// push changes
|
||||
git.GitExecOrPanic(DefaultGitPrj, "push", "origin")
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
continue
|
||||
}
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "push", "origin")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
554	bots-common/pr_test.go	Normal file
@@ -0,0 +1,554 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestPR(t *testing.T) {
|
||||
baseConfig := common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
Organization: "foo",
|
||||
GitProjectName: "barPrj",
|
||||
}
|
||||
|
||||
type prdata struct {
|
||||
pr *models.PullRequest
|
||||
pr_err error
|
||||
reviews []*models.PullReview
|
||||
review_error error
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
data []prdata
|
||||
api_error string
|
||||
|
||||
resLen int
|
||||
reviewed bool
|
||||
consistentSet bool
|
||||
prjGitPRIndex int
|
||||
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRFetcher) (*common.PRSet, error)
|
||||
}{
|
||||
{
|
||||
name: "Error fetching PullRequest",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("Missing PR")},
|
||||
},
|
||||
prjGitPRIndex: -1,
|
||||
},
|
||||
{
|
||||
name: "Error fetching PullRequest in PrjGit",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("missing PR")},
|
||||
{pr: &models.PullRequest{Body: "", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Error fetching prjgit",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 1,
|
||||
prjGitPRIndex: -1,
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
resLen: 2,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
|
||||
{
|
||||
name: "Review set is consistent: 1pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
resLen: 2,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent: 2pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "some desc", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "some other desc\nPR: foo/fer#33", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 3,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
{
|
||||
name: "Review set of prjgit PR is consistent",
|
||||
data: []prdata{
|
||||
{
|
||||
pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}},
|
||||
reviews: []*models.PullReview{
|
||||
{Body: "LGTM", User: &models.User{UserName: "m2"}, State: common.ReviewStateApproved},
|
||||
{Body: "LGTM", User: &models.User{UserName: "super2"}, State: common.ReviewStateApproved},
|
||||
{Body: "LGTM", User: &models.User{UserName: common.Bot_BuildReview}, State: common.ReviewStateApproved},
|
||||
},
|
||||
},
|
||||
},
|
||||
resLen: 1,
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet(mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent: 2pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#222", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo2#41", Index: 20, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#20", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 3,
|
||||
prjGitPRIndex: 2,
|
||||
consistentSet: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaPRChecker(ctl)
|
||||
// reviewer_mock := mock_common.NewMockGiteaReviewRequester(ctl)
|
||||
|
||||
if test.reviewSetFetcher == nil { // if we are fetching the prjgit directly, then these mocks are not called
|
||||
if test.prjGitPRIndex >= 0 {
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
|
||||
Return(test.data[test.prjGitPRIndex].pr, test.data[test.prjGitPRIndex].pr_err)
|
||||
} else if test.prjGitPRIndex < 0 {
|
||||
// no prjgit PR
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
|
||||
Return(nil, nil)
|
||||
}
|
||||
}
|
||||
|
||||
var test_err error
|
||||
for _, data := range test.data {
|
||||
pr_mock.EXPECT().GetPullRequest(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.pr, data.pr_err).AnyTimes()
|
||||
if data.pr_err != nil {
|
||||
test_err = data.pr_err
|
||||
}
|
||||
review_mock.EXPECT().GetPullRequestReviews(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.reviews, data.review_error).AnyTimes()
|
||||
}
|
||||
|
||||
var res *common.PRSet
|
||||
var err error
|
||||
|
||||
if test.reviewSetFetcher != nil {
|
||||
res, err = test.reviewSetFetcher(pr_mock)
|
||||
} else {
|
||||
res, err = common.FetchPRSet(pr_mock, "test", "repo", 42, &baseConfig)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
if test_err != nil {
|
||||
t.Fatal("Expected", test_err, "but got", err)
|
||||
}
|
||||
} else {
|
||||
if res != nil {
|
||||
t.Fatal("error but got ReviewSet?")
|
||||
}
|
||||
|
||||
if test.api_error != "" {
|
||||
if err.Error() != test.api_error {
|
||||
t.Fatal("expected", test.api_error, "but got", err)
|
||||
}
|
||||
} else if test_err != err {
|
||||
t.Fatal("expected", test_err, "but got", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if test.resLen != len(res.PRs) {
|
||||
t.Error("expected result len", test.resLen, "but got", len(res.PRs))
|
||||
}
|
||||
|
||||
PrjGitPR, err := res.GetPrjGitPR()
|
||||
if test.prjGitPRIndex < 0 {
|
||||
if err == nil {
|
||||
t.Error("expected error, but nothing")
|
||||
}
|
||||
}
|
||||
pr_found := false
|
||||
if test.prjGitPRIndex >= 0 {
|
||||
for i := range test.data {
|
||||
if PrjGitPR == test.data[i].pr && i == test.prjGitPRIndex {
|
||||
t.Log("found at index", i)
|
||||
pr_found = true
|
||||
}
|
||||
}
|
||||
if !pr_found {
|
||||
t.Error("Cannot find expected PrjGit location in PR set", PrjGitPR)
|
||||
}
|
||||
} else {
|
||||
if PrjGitPR != nil {
|
||||
t.Log("Expected prjgit not found, but found?", PrjGitPR)
|
||||
}
|
||||
}
|
||||
|
||||
if isConsistent := res.IsConsistent(); isConsistent != test.consistentSet {
|
||||
t.Error("IsConsistent() returned unexpected:", isConsistent)
|
||||
}
|
||||
/*
|
||||
if err := res.AssignReviewers(reviewer_mock); err != nil {
|
||||
t.Error("expected no errors assigning reviewers:", err)
|
||||
}
|
||||
*/
|
||||
|
||||
maintainers := mock_common.NewMockMaintainershipData(ctl)
|
||||
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any()).Return(true).AnyTimes()
|
||||
|
||||
if isApproved := res.IsApproved(review_mock, maintainers); isApproved != test.reviewed {
|
||||
t.Error("expected reviewed to be NOT", isApproved)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRAssignReviewers(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
pkgReviews []*models.PullReview
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "No reviewers",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer and one pkg reviewer only",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"user2", "prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "Reviews are done",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{},
|
||||
},
|
||||
{
|
||||
name: "Stale review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer"}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "pkgrepo", Owner: &models.User{UserName: "other"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("other", "pkgrepo", int64(1)).Return(test.pkgReviews, nil)
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR("org", "repo", "other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: fmt.Sprintf(common.PrPattern, "other", "pkgrepo", 1),
|
||||
User: &models.User{UserName: "bot1"},
|
||||
RequestedReviewers: []*models.User{{UserName: "main_reviewer"}},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 42,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo").Return([]string{"pkgmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet(pr_mock, "other", "pkgrepo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 2 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
r = test.expectedReviewerCall[1]
|
||||
}
|
||||
if len(r) > 0 {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
|
||||
prjgit_tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "PrjMaintainers in prjgit review when not part of pkg set",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot", "prjmaintainer"}},
|
||||
},
|
||||
}
|
||||
for _, test := range prjgit_tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet(pr_mock, "org", "repo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 1 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
t.Fatal("only prjgit pr here")
|
||||
}
|
||||
if len(r) > 0 {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRMerge(t *testing.T) {
|
||||
cwd, _ := os.Getwd()
|
||||
cmd := exec.Command("/usr/bin/bash", path.Join(cwd, "test_repo_setup.sh"))
|
||||
cmd.Dir = t.TempDir()
|
||||
if out, err := cmd.CombinedOutput(); err != nil {
|
||||
t.Fatal(string(out))
|
||||
}
|
||||
|
||||
common.ExtraGitParams = []string{
|
||||
"GIT_CONFIG_COUNT=1",
|
||||
"GIT_CONFIG_KEY_0=protocol.file.allow",
|
||||
"GIT_CONFIG_VALUE_0=always",
|
||||
|
||||
"GIT_AUTHOR_NAME=testname",
|
||||
"GIT_AUTHOR_EMAIL=test@suse.com",
|
||||
"GIT_AUTHOR_DATE='2005-04-07T22:13:13'",
|
||||
"GIT_COMMITTER_NAME=testname",
|
||||
"GIT_COMMITTER_EMAIL=test@suse.com",
|
||||
"GIT_COMMITTER_DATE='2005-04-07T22:13:13'",
|
||||
}
|
||||
|
||||
config := &common.AutogitConfig{
|
||||
Organization: "org",
|
||||
GitProjectName: "prj",
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
pr *models.PullRequest
|
||||
mergeError string
|
||||
}{
|
||||
{
|
||||
name: "Merge base not merged in main",
|
||||
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "e8b0de43d757c96a9d2c7101f4bff404e322f53a1fa4041fb85d646110c38ad4", // "base_add_b1"
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
UserName: "org",
|
||||
},
|
||||
SSHURL: path.Join(cmd.Dir, "prjgit"),
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
|
||||
},
|
||||
},
|
||||
mergeError: "Aborting merge",
|
||||
},
|
||||
{
|
||||
name: "Merge conflict in modules",
|
||||
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "4fbd1026b2d7462ebe9229a49100c11f1ad6555520a21ba515122d8bc41328a8",
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
UserName: "org",
|
||||
},
|
||||
SSHURL: path.Join(cmd.Dir, "prjgit"),
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
|
||||
mock.EXPECT().GetPullRequest("org", "prj", int64(1)).Return(test.pr, nil)
|
||||
|
||||
set, err := common.FetchPRSet(mock, "org", "prj", 1, config)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err = set.Merge("test", "test@example.com"); err != nil && (test.mergeError == "" || (len(test.mergeError) > 0 && !strings.Contains(err.Error(), test.mergeError))) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -87,12 +87,8 @@ func ParseRequestJSON(reqType string, data []byte) (req *Request, err error) {
|
||||
}
|
||||
|
||||
type RequestHandler struct {
|
||||
Branch string
|
||||
PrjGit string
|
||||
|
||||
StdLogger, ErrLogger *log.Logger
|
||||
Request *Request
|
||||
// Git *GitHandler
|
||||
}
|
||||
|
||||
func (r *RequestHandler) WriteError() {
|
||||
|
@@ -22,6 +22,8 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type Head struct {
|
||||
@@ -43,7 +45,7 @@ type IssueLabelDetail struct {
|
||||
type PullRequest struct {
|
||||
Id int
|
||||
Url string
|
||||
Number int
|
||||
Number int64
|
||||
State string
|
||||
|
||||
Base Head
|
||||
@@ -56,7 +58,7 @@ type PullRequest struct {
|
||||
|
||||
type PullRequestWebhookEvent struct {
|
||||
Action string
|
||||
Number int
|
||||
Number int64
|
||||
|
||||
Pull_Request *PullRequest
|
||||
Repository *Repository
|
||||
@@ -68,6 +70,43 @@ func (p *PullRequestWebhookEvent) GetAction() string {
|
||||
return p.Action
|
||||
}
|
||||
|
||||
func PullRequestLabelFromModel(labels []*models.Label) []IssueLabelDetail {
|
||||
l := make([]IssueLabelDetail, len(labels))
|
||||
for i := range labels {
|
||||
l[i].Id = int(labels[i].ID)
|
||||
l[i].Name = labels[i].Name
|
||||
l[i].Exclusive = labels[i].Exclusive
|
||||
l[i].Is_archived = labels[i].IsArchived
|
||||
l[i].Color = labels[i].Color
|
||||
l[i].Description = labels[i].Description
|
||||
l[i].Url = labels[i].URL
|
||||
}
|
||||
|
||||
return l
|
||||
}
|
||||
|
||||
func PullRequestFromModel(pr *models.PullRequest) *PullRequest {
|
||||
return &PullRequest{
|
||||
Id: int(pr.ID),
|
||||
Url: pr.URL,
|
||||
Number: pr.Index,
|
||||
State: string(pr.State),
|
||||
|
||||
Base: Head{
|
||||
Ref: pr.Base.Ref,
|
||||
Sha: pr.Base.Sha,
|
||||
Repo: RepositoryFromModel(pr.Base.Repo),
|
||||
},
|
||||
Head: Head{
|
||||
Ref: pr.Head.Ref,
|
||||
Sha: pr.Head.Sha,
|
||||
Repo: RepositoryFromModel(pr.Head.Repo),
|
||||
},
|
||||
Labels: PullRequestLabelFromModel(pr.Labels),
|
||||
User: *UserFromModel(pr.User),
|
||||
}
|
||||
}
|
||||
|
||||
func (h *RequestHandler) parsePullRequest(data io.Reader) (action *PullRequestWebhookEvent, err error) {
|
||||
action = new(PullRequestWebhookEvent)
|
||||
err = json.NewDecoder(data).Decode(&action)
|
||||
|
@@ -44,7 +44,7 @@ func (*PushWebhookEvent) GetAction() string {
|
||||
return "push"
|
||||
}
|
||||
|
||||
func (h *RequestHandler) parsePushRequest(data io.Reader) (*PushWebhookEvent, error) {
|
||||
func (h *RequestHandler) ParsePushRequest(data io.Reader) (*PushWebhookEvent, error) {
|
||||
action := new(PushWebhookEvent)
|
||||
err := json.NewDecoder(data).Decode(&action)
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
package common
|
||||
package common_test
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
@@ -22,14 +22,16 @@ import (
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestPushRequestParsing(t *testing.T) {
|
||||
t.Run("parsing repo creation message", func(t *testing.T) {
|
||||
var h RequestHandler
|
||||
var h common.RequestHandler
|
||||
|
||||
h.StdLogger, h.ErrLogger = CreateStdoutLogger(os.Stdout, os.Stderr)
|
||||
json, err := h.parsePushRequest(strings.NewReader(examplePushJSON))
|
||||
h.StdLogger, h.ErrLogger = common.CreateStdoutLogger(os.Stdout, os.Stderr)
|
||||
json, err := h.ParsePushRequest(strings.NewReader(examplePushJSON))
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parser push request: %v", err)
|
||||
}
|
||||
|
@@ -23,6 +23,8 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type Repository struct {
|
||||
@@ -59,12 +61,50 @@ type RepositoryWebhookEvent struct {
|
||||
PrjGit string
|
||||
}
|
||||
|
||||
func UserFromModel(user *models.User) *User {
|
||||
return &User {
|
||||
Id: int(user.ID),
|
||||
Username: user.UserName,
|
||||
}
|
||||
}
|
||||
|
||||
func UsersFromModel(users []*models.User) []*User {
|
||||
u := make([]*User, len(users))
|
||||
for i := range users {
|
||||
u[i] = UserFromModel(users[i])
|
||||
}
|
||||
return u
|
||||
}
|
||||
|
||||
func RepositoryFromModel(repo *models.Repository) *Repository {
|
||||
if repo == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &Repository{
|
||||
Id: uint(repo.ID),
|
||||
Name: repo.Name,
|
||||
Full_Name: repo.FullName,
|
||||
Fork: repo.Fork,
|
||||
Parent: RepositoryFromModel(repo.Parent),
|
||||
Owner: &Organization{
|
||||
Id: uint(repo.Owner.ID),
|
||||
Username: repo.Owner.UserName,
|
||||
},
|
||||
Clone_Url: repo.CloneURL,
|
||||
|
||||
Ssh_Url: repo.SSHURL,
|
||||
Default_Branch: repo.DefaultBranch,
|
||||
Object_Format_Name: repo.ObjectFormatName,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *RepositoryWebhookEvent) GetAction() string {
|
||||
return r.Action
|
||||
}
|
||||
|
||||
// TODO: sanity check values!!!!
|
||||
func (h *RequestHandler) parseRepositoryRequest(dataReader io.Reader) (data *RepositoryWebhookEvent, err error) {
|
||||
func (h *RequestHandler) ParseRepositoryRequest(dataReader io.Reader) (data *RepositoryWebhookEvent, err error) {
|
||||
data = new(RepositoryWebhookEvent)
|
||||
if err = json.NewDecoder(dataReader).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
|
@@ -1,4 +1,4 @@
|
||||
package common
|
||||
package common_test
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
@@ -22,6 +22,8 @@ import (
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type testLogger struct {
|
||||
@@ -35,10 +37,10 @@ func (s *testLogger) WriteString(str2 string) (int, error) {
|
||||
|
||||
func TestRepositoryRequestParsing(t *testing.T) {
|
||||
t.Run("parsing repo creation message", func(t *testing.T) {
|
||||
var h RequestHandler
|
||||
var h common.RequestHandler
|
||||
|
||||
h.StdLogger, h.ErrLogger = CreateStdoutLogger(os.Stdout, os.Stdout)
|
||||
json, err := h.parseRepositoryRequest(strings.NewReader(repoCreateJSON))
|
||||
h.StdLogger, h.ErrLogger = common.CreateStdoutLogger(os.Stdout, os.Stdout)
|
||||
json, err := h.ParseRepositoryRequest(strings.NewReader(repoCreateJSON))
|
||||
if err != nil {
|
||||
t.Fatalf("Can't parse struct: %s", err)
|
||||
}
|
||||
|
33	bots-common/reviewers.go	Normal file
@@ -0,0 +1,33 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
)
|
||||
|
||||
type Reviewers struct {
|
||||
Prj []string
|
||||
Pkg []string
|
||||
}
|
||||
|
||||
func ParseReviewers(input []string) *Reviewers {
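	// reviewer prefixes: "*name" reviews both project-git and package PRs,
	// "-name" only project-git PRs, "+name" or a bare name only package PRs;
	// Bot_BuildReview is always appended to the project-git reviewers.
	// For example, {"*lead", "-relmgr", "dev"} gives
	// Prj = [lead, relmgr, Bot_BuildReview] and Pkg = [lead, dev].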
|
||||
r := &Reviewers{}
|
||||
for _, reviewer := range input {
|
||||
switch reviewer[0] {
|
||||
case '*':
|
||||
r.Prj = append(r.Prj, reviewer[1:])
|
||||
r.Pkg = append(r.Pkg, reviewer[1:])
|
||||
case '-':
|
||||
r.Prj = append(r.Prj, reviewer[1:])
|
||||
case '+':
|
||||
r.Pkg = append(r.Pkg, reviewer[1:])
|
||||
default:
|
||||
r.Pkg = append(r.Pkg, reviewer)
|
||||
}
|
||||
}
|
||||
|
||||
if !slices.Contains(r.Prj, Bot_BuildReview) {
|
||||
r.Prj = append(r.Prj, Bot_BuildReview)
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
38	bots-common/reviewers_test.go	Normal file
@@ -0,0 +1,38 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestReviewers(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input []string
|
||||
|
||||
prj []string
|
||||
pkg []string
|
||||
}{
|
||||
{
|
||||
name: "project and package reviewers",
|
||||
input: []string{"1", "2", "3", "*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"5", "7", common.Bot_BuildReview},
|
||||
pkg: []string{"1", "2", "3", "5", "6"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
reviewers := common.ParseReviewers(test.input)
|
||||
if !slices.Equal(reviewers.Prj, test.prj) {
|
||||
t.Error("unexpected return of ForProject():", reviewers.Prj)
|
||||
}
|
||||
if !slices.Equal(reviewers.Pkg, test.pkg) {
|
||||
t.Error("unexpected return of ForProject():", reviewers.Pkg)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
87	bots-common/reviews.go	Normal file
@@ -0,0 +1,87 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type PRReviews struct {
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
}
|
||||
|
||||
func FetchGiteaReviews(rf GiteaReviewFetcher, reviewers []string, org, repo string, no int64) (*PRReviews, error) {
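	// pairs the reviews fetched from Gitea with the list of required
	// reviewers; the IsApproved/HasPendingReviewBy/IsReviewedBy helpers below
	// only count non-stale reviews from users on that list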
|
||||
reviews, err := rf.GetPullRequestReviews(org, repo, no)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &PRReviews{
|
||||
reviews: reviews,
|
||||
reviewers: reviewers,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsApproved() bool {
|
||||
goodReview := false
|
||||
|
||||
for _, reviewer := range r.reviewers {
|
||||
goodReview = false
|
||||
for _, review := range r.reviews {
|
||||
if review.User.UserName == reviewer && review.State == ReviewStateApproved && !review.Stale {
|
||||
goodReview = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !goodReview {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return goodReview
|
||||
}
|
||||
|
||||
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
isPending := false
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved:
|
||||
fallthrough
|
||||
case ReviewStateRequestChanges:
|
||||
return false
|
||||
case ReviewStateRequestReview:
|
||||
fallthrough
|
||||
case ReviewStatePending:
|
||||
isPending = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return isPending
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedBy(reviewer string) bool {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved:
|
||||
return true
|
||||
case ReviewStateRequestChanges:
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
140	bots-common/reviews_test.go	Normal file
@@ -0,0 +1,140 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestReviews(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
fetchErr error
|
||||
isApproved bool
|
||||
isReviewedByTest1 bool
|
||||
isPendingByTest1 bool
|
||||
}{
|
||||
{
|
||||
name: "Reviews of unreviews PR",
|
||||
isApproved: false,
|
||||
},
|
||||
{
|
||||
name: "Single reviewer done",
|
||||
reviews: []*models.PullReview{&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}}},
|
||||
reviewers: []string{"user1"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one not approved",
|
||||
reviews: []*models.PullReview{&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}}},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one stale approved",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}, Stale: true},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one is pending",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one stale and pending",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: false,
|
||||
isReviewedByTest1: false,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer approved",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer approved, but fetch error",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
fetchErr: errors.New("System error fetching reviews."),
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Extra reviewers are ignored",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user4"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
rf := mock_common.NewMockGiteaReviewFetcher(ctl)
|
||||
|
||||
rf.EXPECT().GetPullRequestReviews("test", "pr", int64(1)).Return(test.reviews, test.fetchErr)
|
||||
|
||||
reviews, err := common.FetchGiteaReviews(rf, test.reviewers, "test", "pr", 1)
|
||||
|
||||
if test.fetchErr != nil {
|
||||
if err != test.fetchErr {
|
||||
t.Fatal("FetchReviews() failed with unexpected error:", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if r := reviews.IsApproved(); r != test.isApproved {
|
||||
t.Fatal("Unexpected IsReviewed():", r, "vs. expected", test.isApproved)
|
||||
}
|
||||
|
||||
if r := reviews.HasPendingReviewBy("user1"); r != test.isPendingByTest1 {
|
||||
t.Fatal("Unexpected IsReviewPendingBy(user1):", r)
|
||||
}
|
||||
if r := reviews.IsReviewedBy("user1"); r != test.isReviewedByTest1 {
|
||||
t.Fatal("Unexpected IsReviewedBy(user1):", r)
|
||||
}
|
||||
|
||||
if r := reviews.HasPendingReviewBy("random"); r {
|
||||
t.Fatal("Unexpected IsReviewPendingBy(random):", r)
|
||||
}
|
||||
if r := reviews.IsReviewedBy("random"); r {
|
||||
t.Fatal("Unexpected IsReviewedBy(random):", r)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
173	bots-common/submodules.go	Normal file
@@ -0,0 +1,173 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type Submodule struct {
|
||||
Name string
|
||||
Path string
|
||||
Url string
|
||||
|
||||
Update string
|
||||
Branch string
|
||||
Ignore string
|
||||
Shallow string
|
||||
}
|
||||
|
||||
var SyntaxError error = errors.New("Syntax error")
|
||||
|
||||
func (sub *Submodule) parseSubmoduleName(line string) error {
|
||||
line = strings.TrimSpace(line)
|
||||
if line[0:10] != "submodule " {
|
||||
return SyntaxError
|
||||
}
|
||||
|
||||
r := strings.NewReader(line[10:])
|
||||
|
||||
ch, _, err := r.ReadRune()
|
||||
for ; ; ch, _, err = r.ReadRune() {
|
||||
if err != nil {
|
||||
return fmt.Errorf("%c %d", ch, 32)
|
||||
}
|
||||
if !unicode.IsSpace(ch) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if ch != '"' {
|
||||
return fmt.Errorf("%c %d", ch, 36)
|
||||
// return SyntaxError
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
for ch, _, err = r.ReadRune(); ch != '"'; ch, _, err = r.ReadRune() {
|
||||
if err != nil {
|
||||
return fmt.Errorf("%c %d", ch, 32)
|
||||
}
|
||||
b.WriteRune(ch)
|
||||
}
|
||||
|
||||
if ch != '"' {
|
||||
return fmt.Errorf("%c %d", ch, 45)
|
||||
// return SyntaxError
|
||||
}
|
||||
|
||||
for ch, _, err = r.ReadRune(); unicode.IsSpace(ch); ch, _, err = r.ReadRune() {
|
||||
if err != nil {
|
||||
return fmt.Errorf("%d %w", 38, err)
|
||||
}
|
||||
}
|
||||
|
||||
if ch != ']' || err != nil {
|
||||
return fmt.Errorf("%c %d, %w", ch, 50, err)
|
||||
// return SyntaxError
|
||||
}
|
||||
|
||||
sub.Name = b.String()
|
||||
|
||||
for ch, _, err = r.ReadRune(); ; ch, _, _ = r.ReadRune() {
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
if !unicode.IsSpace(ch) {
|
||||
return SyntaxError
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Submodule) parseKeyValue(line string) error {
|
||||
eqLoc := strings.Index(line, "=")
|
||||
if eqLoc < 0 || eqLoc == len(line)-1 {
|
||||
return SyntaxError
|
||||
}
|
||||
|
||||
key := strings.ToLower(strings.TrimSpace(line[0:eqLoc]))
|
||||
val := strings.TrimSpace(line[eqLoc+1:])
|
||||
|
||||
if len(val) == 0 {
|
||||
return SyntaxError
|
||||
}
|
||||
|
||||
switch key {
|
||||
case "path":
|
||||
s.Path = val
|
||||
case "url":
|
||||
s.Url = val
|
||||
case "shallow":
|
||||
s.Shallow = val
|
||||
case "ignore":
|
||||
s.Ignore = val
|
||||
case "branch":
|
||||
s.Branch = val
|
||||
case "update":
|
||||
s.Update = val
|
||||
default:
|
||||
return SyntaxError
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
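	// minimal line-based .gitmodules parser: a line starting with '[' opens a
	// new [submodule "name"] section and every following key = value line is
	// stored on that submodule; unknown keys, or values before any section,
	// are a SyntaxError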
|
||||
data, err := io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var sub *Submodule
|
||||
ret := []Submodule{}
|
||||
|
||||
lines := bytes.Split(data, []byte("\n"))
|
||||
for _, l := range lines {
|
||||
line := string(bytes.TrimSpace(l))
|
||||
if len(line) == 0 {
|
||||
continue
|
||||
}
|
||||
if line[0] == '[' {
|
||||
if sub != nil {
|
||||
ret = append(ret, *sub)
|
||||
}
|
||||
sub = &Submodule{}
|
||||
if err := sub.parseSubmoduleName(line[1:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else if sub == nil {
|
||||
return nil, SyntaxError
|
||||
} else {
|
||||
if err := sub.parseKeyValue(line); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
if sub != nil {
|
||||
ret = append(ret, *sub)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func writeValue(out io.Writer, key, value string) {
|
||||
if len(value) > 0 {
|
||||
out.Write([]byte(fmt.Sprintf("\t%s = %s\n", key, value)))
|
||||
}
|
||||
}
|
||||
|
||||
func WriteSubmodules(subs []Submodule, out io.Writer) error {
|
||||
for _, sub := range subs {
|
||||
if len(sub.Name) < 1 {
|
||||
return fmt.Errorf("Submodule with no name.")
|
||||
}
|
||||
out.Write([]byte(fmt.Sprintf("[submodule \"%s\"]\n", sub.Name)))
|
||||
writeValue(out, "path", sub.Path)
|
||||
writeValue(out, "url", sub.Url)
|
||||
writeValue(out, "branch", sub.Branch)
|
||||
writeValue(out, "ignore", sub.Ignore)
|
||||
writeValue(out, "shallow", sub.Shallow)
|
||||
writeValue(out, "update", sub.Update)
|
||||
}
|
||||
return nil
|
||||
}
|
195	bots-common/submodules_test.go	Normal file
@@ -0,0 +1,195 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestSubmodulesParsing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
file string
|
||||
subs []common.Submodule
|
||||
has_error bool
|
||||
}{
|
||||
{
|
||||
name: "Empty submodules file",
|
||||
file: "",
|
||||
subs: []common.Submodule{},
|
||||
},
|
||||
{
|
||||
name: "Empty single submodule",
|
||||
file: "[submodule \"Foo\"]",
|
||||
subs: []common.Submodule{
|
||||
{Name: "Foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Invalid submodule name",
|
||||
file: "[submodule \"Foo']",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid submodule name",
|
||||
file: "[submodule 'Foo']",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid submodule name",
|
||||
file: "[submodule Foo]",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid submodule name",
|
||||
file: "[submodul \"Foo\"]",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid submodule name",
|
||||
file: "[submodule \"Foo\"",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Gerbage input",
|
||||
file: "asdf kjasf[d;fkl",
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "Submodule with one entry",
|
||||
file: "[submodule \"libfoo\"]\npath = foo\n\n",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "libfoo",
|
||||
Path: "foo",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Submodules with funny entries entries",
|
||||
file: "[submodule \"libfoo\"]\npath = foo [ bar \n\n [ submodule \"test \" ]\npath=ma ma\nurl= safe",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "libfoo",
|
||||
Path: "foo [ bar",
|
||||
},
|
||||
{
|
||||
Name: "test ",
|
||||
Path: "ma ma",
|
||||
Url: "safe",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Submodule with valid entries",
|
||||
file: "[submodule \"libfoo\"]\npath=foo\nurl=goo\nupdate=none\nbranch=test\nignore=all\nshallow=true",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "libfoo",
|
||||
Path: "foo",
|
||||
Url: "goo",
|
||||
Update: "none",
|
||||
Branch: "test",
|
||||
Ignore: "all",
|
||||
Shallow: "true",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Submodule with an valid entry",
|
||||
file: "[submodule \"libfoo\"]\npath=foo\nurl=goo\nupdate=none\nbranch=test\nignore=all\nshallow=true\nunknown = something",
|
||||
has_error: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
mods, err := common.ParseSubmodulesFile(strings.NewReader(test.file))
|
||||
if test.has_error {
|
||||
if err == nil {
|
||||
t.Error("Expected an error")
|
||||
}
|
||||
} else if err != nil {
|
||||
t.Error("unexpected error:", err)
|
||||
}
|
||||
if !slices.Equal(mods, test.subs) {
|
||||
t.Error("expected", test.subs, "but got", mods)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSubmodulesWriting(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
subs []common.Submodule
|
||||
output []byte
|
||||
has_error bool
|
||||
}{
|
||||
{
|
||||
name: "empty Submodules",
|
||||
output: []byte(""),
|
||||
},
|
||||
{
|
||||
name: "single submodule",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "foo",
|
||||
Url: "bar",
|
||||
},
|
||||
},
|
||||
output: []byte("[submodule \"foo\"]\n\turl = bar\n"),
|
||||
},
|
||||
{
|
||||
name: "empty name submodule",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "foo",
|
||||
Url: "bar",
|
||||
},
|
||||
{},
|
||||
},
|
||||
has_error: true,
|
||||
},
|
||||
{
|
||||
name: "submodule with all the things",
|
||||
subs: []common.Submodule{
|
||||
{
|
||||
Name: "foo",
|
||||
Url: "bar",
|
||||
},
|
||||
{
|
||||
Name: "1",
|
||||
Url: "2",
|
||||
Update: "ok",
|
||||
Path: "3",
|
||||
Branch: "4",
|
||||
Ignore: "5",
|
||||
Shallow: "6",
|
||||
},
|
||||
},
|
||||
output: []byte("[submodule \"foo\"]\n\turl = bar\n[submodule \"1\"]\n\tpath = 3\n\turl = 2\n\tbranch = 4\n\tignore = 5\n\tshallow = 6\n\tupdate = ok\n"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
out := bytes.Buffer{}
|
||||
if err := common.WriteSubmodules(test.subs, &out); err != nil {
|
||||
if !test.has_error {
|
||||
t.Error(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
if test.has_error {
|
||||
t.Error("expected an error")
|
||||
}
|
||||
if !slices.Equal(out.Bytes(), test.output) {
|
||||
t.Error("expected:", test.output, "but got:", out.Bytes())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
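Taken together, the two tests above pin down a round-trip contract for the submodules helpers. As a rough usage sketch (not part of this change; the `.gitmodules` path and the `pkgA` entry are invented for illustration), the exported API visible in this diff can be driven like this:

	package main

	import (
		"fmt"
		"os"

		"src.opensuse.org/autogits/common"
	)

	func main() {
		// Illustrative only: parse a .gitmodules file into the same
		// []common.Submodule slice the tests above construct by hand.
		f, err := os.Open(".gitmodules")
		if err != nil {
			panic(err)
		}
		defer f.Close()

		subs, err := common.ParseSubmodulesFile(f)
		if err != nil {
			panic(err)
		}

		// Adjust one entry ("pkgA" is a made-up name) and write the file
		// back in the format TestSubmodulesWriting expects:
		// [submodule "name"] followed by tab-indented key = value lines.
		for i := range subs {
			if subs[i].Name == "pkgA" {
				subs[i].Branch = "main"
			}
		}
		if err := common.WriteSubmodules(subs, os.Stdout); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
	}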
127  bots-common/test_repo_setup.sh  (new executable file)
@@ -0,0 +1,127 @@
#!/usr/bin/bash

set -x

export GIT_CONFIG_COUNT=2

export GIT_CONFIG_KEY_0=protocol.file.allow
export GIT_CONFIG_VALUE_0=always
export GIT_CONFIG_KEY_1=init.defaultBranch
export GIT_CONFIG_VALUE_1=main

export GIT_AUTHOR_NAME=testname
export GIT_AUTHOR_EMAIL=test@suse.com
export GIT_AUTHOR_DATE='2005-04-07T22:13:13'
export GIT_COMMITTER_NAME=testname
export GIT_COMMITTER_EMAIL=test@suse.com
export GIT_COMMITTER_DATE='2005-04-07T22:13:13'

create_prjgit_sample() {
	mkdir prjgit
	pushd prjgit

	git init -q --object-format=sha256 -b main
	echo Project git is here > README.md
	git add README.md
	git config receive.denyCurrentBranch ignore

	git submodule init
	git submodule -q add ../pkgA pkgA
	git submodule -q add ../pkgB pkgB
	git submodule -q add ../pkgC pkgC
	git commit -q -m 'first commit'

	git checkout -b base_add_b1 main
	git submodule -q add ../pkgB1 pkgB1
	git commit -q -m "pkgB1 added"

	git checkout -b base_add_b2 main
	git clean -ffxd
	git submodule -q add ../pkgB2 pkgB2
	git commit -q -m "pkgB2 added"

	git checkout main
	git clean -ffxd
	git submodule -q add -f ../pkgB1 pkgB1
	git commit -q -m "main adding pkgB1"

	popd
}

create_pkgA() {
	mkdir pkgA
	pushd pkgA

	git init -q --object-format=sha256
	git config receive.denyCurrentBranch ignore
	echo "Package A" > README.md
	git add README.md

	git commit -q -m 'Something base here'

	popd
}

create_pkgB() {
	mkdir pkgB
	pushd pkgB

	git init -q --object-format=sha256
	git config receive.denyCurrentBranch ignore
	echo "Package B" > README.md
	git add README.md

	git commit -q -m 'Something also base here'

	popd
}

create_pkgB1() {
	mkdir pkgB1
	pushd pkgB1

	git init -q --object-format=sha256
	git config receive.denyCurrentBranch ignore
	echo "Package B1" > README.md
	git add README.md

	git commit -q -m 'Something also base here'

	popd
}

create_pkgB2() {
	mkdir pkgB2
	pushd pkgB2

	git init -q --object-format=sha256
	git config receive.denyCurrentBranch ignore
	echo "Package B2" > README.md
	git add README.md

	git commit -q -m 'Something also base here'

	popd
}

create_pkgC() {
	mkdir pkgC
	pushd pkgC

	git init -q --object-format=sha256
	git config receive.denyCurrentBranch ignore
	echo "Package C" > README.md
	git add README.md

	git commit -q -m 'Something another base here'

	popd
}

create_pkgA
create_pkgB
create_pkgB1
create_pkgB2
create_pkgC
create_prjgit_sample
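The script above only prepares fixture repositories (prjgit plus pkgA, pkgB, pkgB1, pkgB2, pkgC); this diff does not show how it is invoked. One plausible way to call it from a Go test is sketched below purely as an assumption — the helper name, the bash path and the use of t.TempDir() are not taken from this PR:

	package common_test

	import (
		"os/exec"
		"path/filepath"
		"testing"
	)

	// setupTestRepos runs the fixture script in a temporary directory so the
	// generated repositories do not pollute the source tree, and returns the
	// directory containing them. Illustrative sketch only.
	func setupTestRepos(t *testing.T) string {
		t.Helper()

		dir := t.TempDir()
		script, err := filepath.Abs("test_repo_setup.sh")
		if err != nil {
			t.Fatal(err)
		}

		cmd := exec.Command("/usr/bin/bash", script)
		cmd.Dir = dir
		if out, err := cmd.CombinedOutput(); err != nil {
			t.Fatalf("fixture setup failed: %v\n%s", err, out)
		}
		return dir
	}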
50  bots-common/utils.go  (new file)
@@ -0,0 +1,50 @@
package common

/*
 * This file is part of Autogits.
 *
 * Copyright © 2024 SUSE LLC
 *
 * Autogits is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 2 of the License, or (at your option) any later
 * version.
 *
 * Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Autogits. If not, see <https://www.gnu.org/licenses/>.
 */

import (
	"fmt"
	"slices"
	"strings"
)

func SplitStringNoEmpty(str, sep string) []string {
	ret := slices.DeleteFunc(strings.Split(str, sep), func(s string) bool {
		return len(strings.TrimSpace(s)) == 0
	})
	for i := range ret {
		ret[i] = strings.TrimSpace(ret[i])
	}
	return ret
}

func TranslateHttpsToSshUrl(url string) (string, error) {
	const url1 = "https://src.opensuse.org/"
	const url2 = "https://src.suse.de/"

	if len(url) > len(url1) && url[0:len(url1)] == url1 {
		return "gitea@src.opensuse.org:" + url[len(url1):], nil
	}
	if len(url) > len(url2) && url[0:len(url2)] == url2 {
		return "gitea@src.suse.de:" + url[len(url2):], nil
	}

	return "", fmt.Errorf("Unknown input url %s", url)
}
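Both helpers are small enough that a short, purely illustrative example (inputs invented here, not taken from the PR) shows their behaviour:

	package main

	import (
		"fmt"

		"src.opensuse.org/autogits/common"
	)

	func main() {
		// " a, ,b ," -> [a b]: empty and whitespace-only fields are dropped,
		// the remaining fields are trimmed.
		fmt.Println(common.SplitStringNoEmpty(" a, ,b ,", ","))

		// HTTPS clone URLs on the two known forges are rewritten to SSH form;
		// any other prefix returns an error. "pool/glibc" is a made-up repo.
		ssh, err := common.TranslateHttpsToSshUrl("https://src.opensuse.org/pool/glibc")
		fmt.Println(ssh, err) // gitea@src.opensuse.org:pool/glibc <nil>
	}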
13  dev_test_helper.sh  (new executable file)
@@ -0,0 +1,13 @@
#!/bin/sh

if [ "x$1" = 'x' ]; then
	echo 'param to `go test --run ...` missing'
	exit 1
fi

while true; do
	go test --run "$1"
	inotifywait --exclude 'node_modules' -qqr -e close_write .. && clear
	sleep 0.2
done
@@ -8,7 +8,6 @@ Requirements:
 * `osc` command-line tool
 * `git-importer` command-line tool


 Areas of Responsibility
 -----------------------

@@ -17,7 +16,6 @@ Areas of Responsibility
 * Uses `git-importer` to import packages
 * Set maintainership information for devel project


 Target Usage
 ------------

@@ -1,10 +1,15 @@
 module src.opensuse.org/devel-importer

-go 1.22.5
+go 1.23.1
+
+toolchain go1.24rc2

 replace src.opensuse.org/autogits/common => ../bots-common

-require src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
+require (
+	github.com/go-openapi/runtime v0.28.0
+	src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
+)

 require (
 	github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
@@ -15,7 +20,6 @@ require (
 	github.com/go-openapi/jsonpointer v0.21.0 // indirect
 	github.com/go-openapi/jsonreference v0.21.0 // indirect
 	github.com/go-openapi/loads v0.22.0 // indirect
-	github.com/go-openapi/runtime v0.28.0 // indirect
 	github.com/go-openapi/spec v0.21.0 // indirect
 	github.com/go-openapi/strfmt v0.23.0 // indirect
 	github.com/go-openapi/swag v0.23.0 // indirect
@@ -26,6 +30,7 @@ require (
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/oklog/ulid v1.3.1 // indirect
 	github.com/opentracing/opentracing-go v1.2.0 // indirect
+	github.com/rabbitmq/amqp091-go v1.10.0 // indirect
 	go.mongodb.org/mongo-driver v1.14.0 // indirect
 	go.opentelemetry.io/otel v1.24.0 // indirect
 	go.opentelemetry.io/otel/metric v1.24.0 // indirect
@@ -48,6 +48,8 @@ github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+
 github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
+github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
 github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
 github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -64,6 +66,10 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg
 go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
 go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
 go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
+go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
+go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
+go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
+go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
 golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
 golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
File diff suppressed because it is too large — doc/project-update.svg (deleted, 759 lines)
@@ -1,759 +0,0 @@
[Deleted SVG source not reproduced here. The file was an Inkscape drawing (CC-BY-4.0, Adam Majer, 2024-08-18) with an embedded script that cycles layers Layer1–Layer4 every two seconds to animate a project update: green "Pkg A" package boxes with commit labels abc123 and cdefg3 on the left, a red "Project B" box on the right listing _config, project.build, maintainer.info and a "pkgA - [abc123]" / "pkgA - [cdefg3]" entry, with arrows from the project's directory listing to the referenced package commit.]
Before: Width | Height | Size: 36 KiB
301  doc/project.svg  (deleted)
@@ -1,301 +0,0 @@
[Deleted SVG source not reproduced here. doc/project.svg was the static counterpart of project-update.svg: a single-layer Inkscape drawing of the green "Pkg A" box with commit label abc123 and the red "Project B" box with its _config, project.build, maintainer.info and "pkgA - [abc123]" entries.]
|
||||
id="path12705"
|
||||
inkscape:connector-type="polyline"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
<metadata
|
||||
id="metadata8476">
|
||||
<rdf:RDF>
|
||||
<cc:License
|
||||
rdf:about="http://creativecommons.org/licenses/by/4.0/">
|
||||
<cc:permits
|
||||
rdf:resource="http://creativecommons.org/ns#Reproduction" />
|
||||
<cc:permits
|
||||
rdf:resource="http://creativecommons.org/ns#Distribution" />
|
||||
<cc:requires
|
||||
rdf:resource="http://creativecommons.org/ns#Notice" />
|
||||
<cc:requires
|
||||
rdf:resource="http://creativecommons.org/ns#Attribution" />
|
||||
<cc:permits
|
||||
rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
|
||||
</cc:License>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<cc:license
|
||||
rdf:resource="http://creativecommons.org/licenses/by/4.0/" />
|
||||
<dc:creator>
|
||||
<cc:Agent>
|
||||
<dc:title>Adam Majer</dc:title>
|
||||
</cc:Agent>
|
||||
</dc:creator>
|
||||
<dc:date>2024-08-18</dc:date>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
</svg>
|
Before Width: | Height: | Size: 13 KiB |
@@ -324,17 +324,25 @@ func parseRequestJSONOrg(reqType string, data []byte) (org *common.Organization,
|
||||
|
||||
func main() {
|
||||
var listenAddr string
|
||||
var reqBearerToken string
|
||||
var doPublish bool
|
||||
|
||||
flag.BoolVar(&DebugMode, "debug", false, "enables debugging messages")
|
||||
flag.StringVar(&listenAddr, "listen", ListenAddrDef, "HTTP listen socket address for webhook events")
|
||||
flag.StringVar(&topicScope, "topic-domain", DefTopicDomain, "Default domain for RabbitMQ topics")
|
||||
flag.StringVar(&reqBearerToken, "token", "", "HTTP Bearer token to match")
|
||||
flag.BoolVar(&doPublish, "publish", true, "Publish to RabbitMQ. Disable for testing.")
|
||||
flag.Parse()
|
||||
|
||||
log.Println("Starting....")
|
||||
log.Printf(" * Debugging: %t\n", DebugMode)
|
||||
log.Printf(" * Listening: %s\n", listenAddr)
|
||||
log.Printf(" * Bearer token: %t\n", len(reqBearerToken) > 0)
|
||||
log.Printf(" * Publish to RabbitMQ: %t\n", doPublish)
|
||||
|
||||
connectToRabbitMQ()
|
||||
if doPublish {
|
||||
connectToRabbitMQ()
|
||||
}
|
||||
|
||||
http.HandleFunc("POST /rabbitmq-forwarder", func(res http.ResponseWriter, req *http.Request) {
|
||||
if len(req.Header.Get("Content-Type")) == 0 ||
|
||||
@@ -345,6 +353,14 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
if len(reqBearerToken) > 0 {
|
||||
authToken := req.Header.Get("Authorization")
|
||||
if len(authToken) != len(reqBearerToken)+7 || authToken[0:7] != "Bearer " || authToken[7:] != reqBearerToken {
|
||||
log.Println("Invalid Authorization request...", authToken)
|
||||
res.WriteHeader(http.StatusNetworkAuthenticationRequired)
return
}
|
||||
}
|
||||
|
||||
hdr := req.Header[common.GiteaRequestHeader]
|
||||
if len(hdr) != 1 {
|
||||
res.WriteHeader(http.StatusInternalServerError)
|
||||
@@ -395,17 +411,19 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
err = PublishMessage(org.Username, reqType, extraAction, data)
|
||||
if err != nil {
|
||||
errorStr := fmt.Sprintf("hook (%s) processing error: %v\n", reqType, err)
|
||||
res.Header().Add("Content-Type", "plain/text")
|
||||
res.Write([]byte(errorStr))
|
||||
res.WriteHeader(http.StatusBadRequest)
|
||||
if doPublish {
|
||||
err = PublishMessage(org.Username, reqType, extraAction, data)
|
||||
if err != nil {
|
||||
errorStr := fmt.Sprintf("hook (%s) processing error: %v\n", reqType, err)
|
||||
res.Header().Add("Content-Type", "plain/text")
|
||||
res.Write([]byte(errorStr))
|
||||
res.WriteHeader(http.StatusBadRequest)
|
||||
|
||||
if DebugMode {
|
||||
log.Println(errorStr)
|
||||
if DebugMode {
|
||||
log.Println(errorStr)
|
||||
}
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
res.WriteHeader(http.StatusOK)
|
||||
})
|
||||
|
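As a usage note for the bearer-token check added above: the forwarder rejects any request whose Authorization header is not exactly `Bearer ` followed by the configured token, in addition to requiring a Content-Type and the Gitea webhook header. Below is a minimal sketch of a test client; the local address, token value, payload and the `X-Gitea-Event` header name are assumptions for illustration, not part of this change.

```go
package main

import (
	"bytes"
	"log"
	"net/http"
)

func main() {
	// Hypothetical forwarder address and token; adjust to the actual deployment.
	const forwarderURL = "http://localhost:8000/rabbitmq-forwarder"
	const token = "s3cret"

	payload := []byte(`{"action": "opened"}`) // placeholder webhook body

	req, err := http.NewRequest(http.MethodPost, forwarderURL, bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// The handler compares this verbatim against "Bearer " + its -token flag value.
	req.Header.Set("Authorization", "Bearer "+token)
	// Assumed Gitea event header name; the handler reads common.GiteaRequestHeader.
	req.Header.Set("X-Gitea-Event", "pull_request")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println("forwarder replied:", resp.Status)
}
```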
24
group-review/README.md
Normal file
@@ -0,0 +1,24 @@
|
||||
Group Review Bot
|
||||
================
|
||||
|
||||
Areas of responsibility
|
||||
-----------------------
|
||||
|
||||
1. Handles reviews associated with review groups defined in the
   ProjectGit.
|
||||
|
||||
2. Assumes that workflow-pr associates and defines the PR set from
   which the groups.json is read (the base of the PrjGit PR); see the
   format sketch after the Requirements list below.
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Projects where policy reviews are required.
|
||||
|
||||
Requirements
|
||||
------------
|
||||
* Gitea token to:
|
||||
+ R/W PullRequest
|
||||
+ R/W Notification
|
||||
+ R User
|
||||
|
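As referenced above, the group definition is read from the base of the PrjGit PR by fetchReviewGroupConfig in group-review/main.go, which fetches `<group-name>.review.group` and unmarshals it into a list of members. A minimal sketch of that format and how it decodes, with a made-up group name and member names:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Mirrors ReviewGroupMember from group-review/main.go; only the member name is used.
type ReviewGroupMember struct {
	Name string
}

func main() {
	// Hypothetical contents of e.g. "release-managers.review.group" in the PrjGit base branch.
	raw := []byte(`[{"name": "alice"}, {"name": "bob"}]`)

	var members []ReviewGroupMember
	if err := json.Unmarshal(raw, &members); err != nil {
		log.Fatal(err)
	}
	for _, m := range members {
		fmt.Println("group member:", m.Name)
	}
}
```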
@@ -1,6 +1,8 @@
|
||||
module src.opensuse.org/autogits/prjgit-updater
|
||||
module src.opensuse.org/autogits/group-review
|
||||
|
||||
go 1.22.3
|
||||
go 1.23.1
|
||||
|
||||
toolchain go1.24rc2
|
||||
|
||||
replace src.opensuse.org/autogits/common => ../bots-common
|
||||
|
185
group-review/main.go
Normal file
@@ -0,0 +1,185 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"log"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
var reviewer *models.User
|
||||
var groupName string
|
||||
var configs common.AutogitConfigs
|
||||
|
||||
type ReviewGroupMember struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func fetchReviewGroupConfig(gitea common.Gitea, org, repo, branch, groupName string) (reviewers []ReviewGroupMember, err error) {
|
||||
data, _, err := gitea.GetRepositoryFileContent(org, repo, branch, groupName+".review.group")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(data, &reviewers)
|
||||
return
|
||||
}
|
||||
|
||||
func processNotifications(notification *models.NotificationThread, gitea common.Gitea) {
|
||||
rx := regexp.MustCompile(`^https://src\.(?:open)?suse\.(?:org|de)/api/v\d+/repos/(?<org>[a-zA-Z0-9]+)/(?<project>[_a-zA-Z0-9]+)/issues/(?<num>[0-9]+)$`)
|
||||
subject := notification.Subject
|
||||
match := rx.FindStringSubmatch(subject.URL)
|
||||
if match == nil {
|
||||
log.Panicf("Unexpected format of notification: %s", subject.URL)
|
||||
}
|
||||
|
||||
log.Println("processing")
|
||||
log.Println("project:", match[2])
|
||||
log.Println("org: ", match[1])
|
||||
log.Println("number: ", match[3])
|
||||
|
||||
org := match[1]
|
||||
repo := match[2]
|
||||
id, _ := strconv.ParseInt(match[3], 10, 64)
|
||||
|
||||
pr, err := gitea.GetPullRequest(org, repo, id)
|
||||
if err != nil {
|
||||
log.Println("No PR associated with review:", subject.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
config := configs.GetPrjGitConfig(org, repo, pr.Base.Name)
|
||||
|
||||
log.Println("PR state:", pr.State)
|
||||
if pr.State == "closed" {
|
||||
// dismiss the review
|
||||
log.Println(" -- closed request, so nothing to review")
|
||||
gitea.SetNotificationRead(notification.ID)
|
||||
return
|
||||
}
|
||||
|
||||
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
|
||||
if err != nil {
|
||||
log.Println("No reviews associated with request:", subject.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
prs, err := common.FetchPRSet(gitea, org, repo, id, config)
|
||||
if err != nil {
|
||||
log.Printf("Cannot fetch PRSet for %s/%s/%d. Error: %v\n", org, repo, id, err)
|
||||
return
|
||||
}
|
||||
|
||||
prjGitPR, err := prs.GetPrjGitPR()
|
||||
if err != nil {
|
||||
log.Println("Cannot fetch PrjGit PR.", err)
|
||||
return
|
||||
}
|
||||
|
||||
groupMembers, err := fetchReviewGroupConfig(gitea, prjGitPR.Base.Repo.Owner.UserName, prjGitPR.Base.Repo.Name, prjGitPR.Base.Sha, groupName)
|
||||
if err != nil {
|
||||
log.Println("Cannot fetch ReviewGroup definition:", groupName, err)
|
||||
}
|
||||
|
||||
for _, review := range reviews {
|
||||
user := ""
|
||||
if !review.Stale &&
|
||||
review.State == common.ReviewStateApproved &&
|
||||
slices.ContainsFunc(groupMembers, func(g ReviewGroupMember) bool {
|
||||
if g.Name == review.User.UserName {
|
||||
user = g.Name
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}) &&
|
||||
strings.Contains(review.Body, "/"+groupName+" LGTM\n") {
|
||||
|
||||
gitea.AddReviewComment(pr, common.ReviewStateApproved, "Signed off by: "+user)
|
||||
if err := gitea.SetNotificationRead(notification.ID); err != nil {
|
||||
log.Println("Cannot set notification as read", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func periodReviewCheck(gitea common.Gitea) {
|
||||
notifications, err := gitea.GetPullNotifications(nil)
|
||||
if err != nil {
|
||||
log.Println(" EEE Error fetching unread notifications: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, notification := range notifications {
|
||||
processNotifications(notification, gitea)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
giteaHost := flag.String("gitea-host", "src.opensuse.org", "Gitea instance used for reviews")
|
||||
rabbitMqHost := flag.String("rabbit-host", "rabbit.opensuse.org", "RabbitMQ instance where Gitea webhook notifications are sent")
|
||||
interval := flag.Int64("internval", 5, "Notification polling interval in minutes (min 1 min)")
|
||||
configFile := flag.String("config", "", "PrjGit listing config file")
|
||||
flag.Parse()
|
||||
|
||||
args := flag.Args()
|
||||
if len(args) != 1 {
|
||||
log.Println(" syntax:")
|
||||
log.Println(" group-review [OPTIONS] <review-group-name>")
|
||||
log.Println()
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
groupName = args[0]
|
||||
|
||||
configData, err := common.ReadConfigFile(*configFile)
|
||||
if err != nil {
|
||||
log.Println("Failed to read config file", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
|
||||
gitea := common.AllocateGiteaTransport(*giteaHost)
|
||||
configs, err = common.ResolveWorkflowConfigs(gitea, configData)
|
||||
if err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
|
||||
reviewer, err = gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
log.Panicln("Cannot fetch review user: %w", err)
|
||||
}
|
||||
|
||||
if *interval < 1 {
|
||||
*interval = 1
|
||||
}
|
||||
|
||||
log.Println(" ** processing group reviews for group:", groupName)
|
||||
log.Println(" ** username in Gitea:", reviewer.UserName)
|
||||
log.Println(" ** polling internval:", *interval, "min")
|
||||
log.Println(" ** connecting to RabbitMQ:", *rabbitMqHost)
|
||||
|
||||
if groupName != reviewer.UserName {
|
||||
log.Println(" ***** Reviewer does not match group name. Aborting. *****")
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
periodReviewCheck(gitea)
|
||||
time.Sleep(time.Duration(*interval * int64(time.Minute)))
|
||||
}
|
||||
}
|
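To restate the approval rule implemented in processNotifications above: a review counts toward the group only if it is not stale, is in the approved state, was written by a user listed in the group file, and its body contains the literal line `/<group-name> LGTM`. A self-contained sketch of that predicate, with a hypothetical group and review:

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

type review struct {
	user  string
	state string
	stale bool
	body  string
}

// countsAsGroupApproval mirrors the check in processNotifications: the review must be
// non-stale, approved, authored by a group member, and contain "/<group> LGTM".
func countsAsGroupApproval(r review, group string, members []string) bool {
	return !r.stale &&
		r.state == "APPROVED" && // stand-in for common.ReviewStateApproved
		slices.Contains(members, r.user) &&
		strings.Contains(r.body, "/"+group+" LGTM\n")
}

func main() {
	members := []string{"alice", "bob"} // hypothetical group members
	ok := countsAsGroupApproval(review{
		user:  "alice",
		state: "APPROVED",
		body:  "/release-managers LGTM\n",
	}, "release-managers", members)
	fmt.Println("counts as group approval:", ok)
}
```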
@@ -1,3 +1 @@
|
||||
package main
|
||||
|
||||
// TODO
|
@@ -1,17 +1,17 @@
|
||||
module src.opensuse.org/autogits/obs-staging-bot
|
||||
|
||||
go 1.22.3
|
||||
go 1.23.1
|
||||
|
||||
toolchain go1.24rc2
|
||||
|
||||
replace src.opensuse.org/autogits/common => ../bots-common
|
||||
|
||||
require (
|
||||
github.com/mattn/go-sqlite3 v1.14.22
|
||||
github.com/rabbitmq/amqp091-go v1.10.0
|
||||
src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/go-logr/logr v1.4.1 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
@@ -29,7 +29,6 @@ require (
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/oapi-codegen/runtime v1.1.1 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/opentracing/opentracing-go v1.2.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.14.0 // indirect
|
||||
|
@@ -1,9 +1,5 @@
|
||||
github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
|
||||
github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ=
|
||||
github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk=
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
|
||||
github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -38,19 +34,14 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro=
|
||||
github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
|
||||
@@ -61,7 +52,6 @@ github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzuk
|
||||
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
|
||||
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
|
||||
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
|
||||
github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
@@ -78,6 +68,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
||||
|
@@ -21,12 +21,14 @@ package main
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -53,19 +55,16 @@ func failOnError(err error, msg string) {
|
||||
}
|
||||
}
|
||||
|
||||
func fetchPrGit(h *common.RequestHandler, pr *models.PullRequest) error {
|
||||
func fetchPrGit(git common.Git, pr *models.PullRequest) error {
|
||||
// clone PR head and base and return path
|
||||
if h.HasError() {
|
||||
return h.Error
|
||||
}
|
||||
if _, err := os.Stat(path.Join(h.GitPath, pr.Head.Sha)); os.IsNotExist(err) {
|
||||
h.GitExec("", "clone", "--depth", "1", pr.Head.Repo.CloneURL, pr.Head.Sha)
|
||||
h.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.Base.Sha)
|
||||
if _, err := os.Stat(path.Join(git.GetPath(), pr.Head.Sha)); os.IsNotExist(err) {
|
||||
git.GitExec("", "clone", "--depth", "1", pr.Head.Repo.CloneURL, pr.Head.Sha)
|
||||
git.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.Base.Sha)
|
||||
} else if err != nil {
|
||||
h.Error = err
|
||||
return err
|
||||
}
|
||||
|
||||
return h.Error
|
||||
return nil
|
||||
}
|
||||
|
||||
func getObsProjectAssociatedWithPr(baseProject string, pr *models.PullRequest) string {
|
||||
@@ -99,13 +98,13 @@ const (
|
||||
BuildStatusSummaryUnknown = 4
|
||||
)
|
||||
|
||||
func processBuildStatus(h *common.RequestHandler, project, refProject *common.BuildResultList) BuildStatusSummary {
|
||||
func processBuildStatus(project, refProject *common.BuildResultList) BuildStatusSummary {
|
||||
if _, finished := project.BuildResultSummary(); !finished {
|
||||
return BuildStatusSummaryBuilding
|
||||
}
|
||||
|
||||
if _, finished := refProject.BuildResultSummary(); !finished {
|
||||
h.LogError("refProject not finished building??")
|
||||
log.Println("refProject not finished building??")
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
|
||||
@@ -130,7 +129,7 @@ func processBuildStatus(h *common.RequestHandler, project, refProject *common.Bu
|
||||
repoRes := &project.Result[i]
|
||||
repoResStatus, ok := common.ObsRepoStatusDetails[repoRes.Code]
|
||||
if !ok {
|
||||
h.LogError("cannot find code: %s", repoRes.Code)
|
||||
log.Println("cannot find code:", repoRes.Code)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
if !repoResStatus.Finished {
|
||||
@@ -140,9 +139,8 @@ func processBuildStatus(h *common.RequestHandler, project, refProject *common.Bu
|
||||
for _, pkg := range repoRes.Status {
|
||||
pkgStatus, ok := common.ObsBuildStatusDetails[pkg.Code]
|
||||
if !ok {
|
||||
h.LogError("Unknown package build status: %s for %s", pkg.Code, pkg.Package)
|
||||
h.LogError("Details: %s", pkg.Details)
|
||||
return BuildStatusSummaryUnknown
|
||||
log.Println("Unknown package build status:", pkg.Code, "for", pkg.Package)
|
||||
log.Println("Details:", pkg.Details)
|
||||
}
|
||||
|
||||
if pkgStatus.Success {
|
||||
@@ -159,9 +157,9 @@ func processBuildStatus(h *common.RequestHandler, project, refProject *common.Bu
|
||||
|
||||
slices.SortFunc(refProject.Result, BuildResultSorter)
|
||||
|
||||
h.Log("comparing results %d vs. ref %d", len(project.Result), len(refProject.Result))
|
||||
log.Printf("comparing results %d vs. ref %d\n", len(project.Result), len(refProject.Result))
|
||||
for i := 0; i < len(project.Result); i++ {
|
||||
h.Log("searching for %s/%s", project.Result[i].Repository, project.Result[i].Arch)
|
||||
log.Println("searching for", project.Result[i].Repository, "/", project.Result[i].Arch)
|
||||
j := 0
|
||||
found:
|
||||
for ; j < len(refProject.Result); j++ {
|
||||
@@ -170,8 +168,8 @@ func processBuildStatus(h *common.RequestHandler, project, refProject *common.Bu
|
||||
continue
|
||||
}
|
||||
|
||||
h.Log("found match for %s/%s @ %d", project.Result[i].Repository, project.Result[i].Arch, j)
|
||||
res := processRepoBuildStatus(h, project.Result[i].Status, refProject.Result[j].Status)
|
||||
log.Printf("found match for %s/%s @ %d\n", project.Result[i].Repository, project.Result[i].Arch, j)
|
||||
res := processRepoBuildStatus(project.Result[i].Status, refProject.Result[j].Status)
|
||||
switch res {
|
||||
case BuildStatusSummarySuccess:
|
||||
break found
|
||||
@@ -180,16 +178,16 @@ func processBuildStatus(h *common.RequestHandler, project, refProject *common.Bu
|
||||
}
|
||||
}
|
||||
|
||||
h.Log("%d", j)
|
||||
log.Println(j)
|
||||
if j >= len(refProject.Result) {
|
||||
h.LogError("Cannot find results... %#v \n %#v\n", project.Result[i], refProject.Result)
|
||||
log.Printf("Cannot find results... %#v \n %#v\n", project.Result[i], refProject.Result)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
}
|
||||
return BuildStatusSummarySuccess
|
||||
}
|
||||
|
||||
func processRepoBuildStatus(h *common.RequestHandler, results, ref []common.PackageBuildStatus) BuildStatusSummary {
|
||||
func processRepoBuildStatus(results, ref []common.PackageBuildStatus) BuildStatusSummary {
|
||||
PackageBuildStatusSorter := func(a, b common.PackageBuildStatus) int {
|
||||
return strings.Compare(a.Package, b.Package)
|
||||
}
|
||||
@@ -202,7 +200,7 @@ func processRepoBuildStatus(h *common.RequestHandler, results, ref []common.Pack
|
||||
for i := 0; i < len(results); i++ {
|
||||
res, ok := common.ObsBuildStatusDetails[results[i].Code]
|
||||
if !ok {
|
||||
h.LogError("unknown package result code: %s for package %s", results[i].Code, results[i].Package)
|
||||
log.Printf("unknown package result code: %s for package %s\n", results[i].Code, results[i].Package)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
|
||||
@@ -218,12 +216,12 @@ func processRepoBuildStatus(h *common.RequestHandler, results, ref []common.Pack
|
||||
if j < len(results) && results[i].Package == ref[j].Package {
|
||||
refRes, ok := common.ObsBuildStatusDetails[ref[j].Code]
|
||||
if !ok {
|
||||
h.LogError("unknown package result code: %s for package %s", ref[j].Code, ref[j].Package)
|
||||
log.Printf("unknown package result code: %s for package %s\n", ref[j].Code, ref[j].Package)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
|
||||
if !refRes.Finished {
|
||||
h.LogError("not finished building in reference project?!")
|
||||
log.Println("not finished building in reference project?!")
|
||||
}
|
||||
|
||||
if refRes.Success {
|
||||
@@ -236,18 +234,24 @@ func processRepoBuildStatus(h *common.RequestHandler, results, ref []common.Pack
|
||||
return BuildStatusSummarySuccess
|
||||
}
|
||||
|
||||
func generateObsPrjMeta(h *common.RequestHandler, pr *models.PullRequest, obsClient *common.ObsClient) (*common.ProjectMeta, error) {
|
||||
h.Log("repo content fetching ...")
|
||||
err := fetchPrGit(h, pr)
|
||||
func generateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, obsClient *common.ObsClient) (*common.ProjectMeta, error) {
|
||||
log.Println("repo content fetching ...")
|
||||
err := fetchPrGit(git, pr)
|
||||
if err != nil {
|
||||
h.LogError("Cannot fetch PR git: %s", pr.URL)
|
||||
log.Println("Cannot fetch PR git:", pr.URL)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// find modified submodules and new submodules -- build them
|
||||
dir := pr.Head.Sha
|
||||
headSubmodules := h.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
baseSubmodules := h.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
headSubmodules, err := git.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
baseSubmodules, err := git.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
modifiedOrNew := make([]string, 0, 16)
|
||||
for pkg, headOid := range headSubmodules {
|
||||
@@ -255,24 +259,24 @@ func generateObsPrjMeta(h *common.RequestHandler, pr *models.PullRequest, obsCli
|
||||
modifiedOrNew = append(modifiedOrNew, pkg)
|
||||
}
|
||||
}
|
||||
buildPrj := string(bytes.TrimSpace(h.GitCatFile(dir, pr.Head.Sha, "project.build")))
|
||||
prjBuild, err := git.GitCatFile(dir, pr.Head.Sha, "project.build")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buildPrj := string(bytes.TrimSpace(prjBuild))
|
||||
|
||||
if len(buildPrj) < 1 {
|
||||
_, err := h.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
return nil, h.Error
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
return nil, fmt.Errorf("Cannot find reference project for %s PR#%d", pr.Base.Name, pr.Index)
|
||||
}
|
||||
if h.HasError() {
|
||||
h.LogPlainError(h.Error)
|
||||
return nil, h.Error
|
||||
}
|
||||
|
||||
meta, err := obsClient.GetProjectMeta(buildPrj)
|
||||
if err != nil {
|
||||
h.Log("error fetching project meta for %s: %v", buildPrj, err)
|
||||
log.Println("error fetching project meta for", buildPrj, ". Err:", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -304,23 +308,23 @@ func generateObsPrjMeta(h *common.RequestHandler, pr *models.PullRequest, obsCli
|
||||
Repository: r.Name,
|
||||
}}
|
||||
}
|
||||
h.Log("%#v", meta)
|
||||
log.Println(meta)
|
||||
return meta, nil
|
||||
}
|
||||
|
||||
func startOrUpdateBuild(h *common.RequestHandler, pr *models.PullRequest, obsClient *common.ObsClient) error {
|
||||
h.Log("fetching OBS project Meta")
|
||||
func startOrUpdateBuild(git common.Git, gitea common.Gitea, pr *models.PullRequest, obsClient *common.ObsClient) error {
|
||||
log.Println("fetching OBS project Meta")
|
||||
obsPrProject := getObsProjectAssociatedWithPr(obsClient.HomeProject, pr)
|
||||
meta, err := obsClient.GetProjectMeta(obsPrProject)
|
||||
if err != nil {
|
||||
h.Log("error fetching project meta for %s: %v", obsPrProject, err)
|
||||
log.Println("error fetching project meta for", obsPrProject, ":", err)
|
||||
return err
|
||||
}
|
||||
|
||||
if meta != nil {
|
||||
path, err := url.Parse(meta.ScmSync)
|
||||
if err != nil {
|
||||
h.Log("Cannot parse SCMSYNC url: '%s' .. regenerating build", meta.ScmSync)
|
||||
log.Println("Cannot parse SCMSYNC url: '", meta.ScmSync, "' .. regenerating build")
|
||||
meta = nil
|
||||
} else {
|
||||
if path.Fragment == pr.Head.Sha {
|
||||
@@ -328,13 +332,13 @@ func startOrUpdateBuild(h *common.RequestHandler, pr *models.PullRequest, obsCli
|
||||
return nil
|
||||
}
|
||||
// build needs update
|
||||
h.Log("Detected Head update... regenerating build...")
|
||||
log.Println("Detected Head update... regenerating build...")
|
||||
meta = nil
|
||||
}
|
||||
}
|
||||
if meta == nil {
|
||||
// new build
|
||||
meta, err = generateObsPrjMeta(h, pr, obsClient)
|
||||
meta, err = generateObsPrjMeta(git, gitea, pr, obsClient)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -342,43 +346,74 @@ func startOrUpdateBuild(h *common.RequestHandler, pr *models.PullRequest, obsCli
|
||||
|
||||
err = obsClient.SetProjectMeta(meta)
|
||||
if err != nil {
|
||||
h.Error = err
|
||||
h.LogError("cannot create meta project: %#v", err)
|
||||
return h.Error
|
||||
log.Println("cannot create meta project:", err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processPullNotification(h *common.RequestHandler, thread *models.NotificationThread) {
|
||||
rx := regexp.MustCompile(`^https://src\.(?:open)?suse\.(?:org|de)/api/v\d+/repos/(?<org>[a-zA-Z0-9]+)/(?<project>[_a-zA-Z0-9]+)/issues/(?<num>[0-9]+)$`)
|
||||
func processPullNotification(gitea common.Gitea, thread *models.NotificationThread) {
|
||||
defer func() {
|
||||
err := recover()
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
log.Println(string(debug.Stack()))
|
||||
}
|
||||
}()
|
||||
|
||||
gh := common.GitHandlerGeneratorImpl{}
|
||||
git, err := gh.CreateGitHandler(GitAuthor, "noaddress@suse.de", BotName)
|
||||
if err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
|
||||
rx := regexp.MustCompile(`^https://src\.(?:open)?suse\.(?:org|de)/api/v\d+/repos/(?<org>[-_a-zA-Z0-9]+)/(?<project>[-_a-zA-Z0-9]+)/issues/(?<num>[0-9]+)$`)
|
||||
notification := thread.Subject
|
||||
match := rx.FindStringSubmatch(notification.URL)
|
||||
if match == nil {
|
||||
log.Panicf("Unexpected format of notification: %s", notification.URL)
|
||||
}
|
||||
|
||||
h.Log("processing")
|
||||
h.Log("project: %s", match[2])
|
||||
h.Log("org: %s", match[1])
|
||||
h.Log("number: %s", match[3])
|
||||
log.Println("processing")
|
||||
log.Println("project:", match[2])
|
||||
log.Println("org: ", match[1])
|
||||
log.Println("number: ", match[3])
|
||||
|
||||
org := match[1]
|
||||
repo := match[2]
|
||||
id, _ := strconv.ParseInt(match[3], 10, 64)
|
||||
|
||||
pr, reviews, err := h.GetPullRequestAndReviews(org, repo, id)
|
||||
pr, err := gitea.GetPullRequest(org, repo, id)
|
||||
if err != nil {
|
||||
h.LogError("No PR associated with review: %s. Error: %v", notification.URL, err)
|
||||
log.Println("No PR associated with review:", notification.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
obsClient, err := common.NewObsClient("api.opensuse.org")
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
log.Println("PR state:", pr.State)
|
||||
if pr.State == "closed" {
|
||||
// dismiss the review
|
||||
log.Println(" -- closed request, so nothing to review")
|
||||
gitea.SetNotificationRead(thread.ID)
|
||||
return
|
||||
}
|
||||
|
||||
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
|
||||
if err != nil {
|
||||
log.Println("No reviews associated with request:", notification.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
obsClient, err := common.NewObsClient(obsApiHost)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(BuildRoot) > 0 {
|
||||
obsClient.HomeProject = BuildRoot
|
||||
}
|
||||
|
||||
reviewRequested := false
|
||||
for _, reviewer := range pr.RequestedReviewers {
|
||||
if reviewer.UserName == Username {
|
||||
@@ -388,8 +423,8 @@ func processPullNotification(h *common.RequestHandler, thread *models.Notificati
|
||||
}
|
||||
|
||||
if !reviewRequested {
|
||||
h.Log("Review not requested in notification. Setting to status 'read'")
|
||||
h.SetNotificationRead(thread.ID)
|
||||
log.Println("Review not requested in notification. Setting to status 'read'")
|
||||
gitea.SetNotificationRead(thread.ID)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -407,37 +442,47 @@ func processPullNotification(h *common.RequestHandler, thread *models.Notificati
|
||||
|
||||
for idx := len(reviews) - 1; idx >= 0; idx-- {
|
||||
review := reviews[idx]
|
||||
h.Log("state: %s, body: %s, id:%d\n", string(review.State), review.Body, review.ID)
|
||||
log.Printf("state: %s, body: %s, id:%d\n", string(review.State), review.Body, review.ID)
|
||||
|
||||
if review.User.UserName != "autogits_obs_staging_bot" {
|
||||
if review.User.UserName != Username {
|
||||
continue
|
||||
}
|
||||
|
||||
h.Log("processing state...")
|
||||
log.Println("processing state...")
|
||||
|
||||
switch review.State {
|
||||
|
||||
// create build project, if doesn't exist, and add it to pending requests
|
||||
case common.ReviewStateUnknown, common.ReviewStateRequestReview:
|
||||
if err := startOrUpdateBuild(h, pr, obsClient); err != nil {
|
||||
if err := startOrUpdateBuild(git, gitea, pr, obsClient); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
msg := "Build is started in https://build.opensuse.org/project/show/" +
|
||||
msg := "Build is started in https://" + obsWebHost + "/project/show/" +
|
||||
getObsProjectAssociatedWithPr(obsClient.HomeProject, pr)
|
||||
h.AddReviewComment(pr, common.ReviewStatePending, msg)
|
||||
gitea.AddReviewComment(pr, common.ReviewStatePending, msg)
|
||||
|
||||
case common.ReviewStatePending:
|
||||
err := fetchPrGit(h, pr)
|
||||
if err := startOrUpdateBuild(git, gitea, pr, obsClient); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err := fetchPrGit(git, pr)
|
||||
if err != nil {
|
||||
h.LogError("Cannot fetch PR git: %s", pr.URL)
|
||||
log.Println("Cannot fetch PR git:", pr.URL)
|
||||
return
|
||||
}
|
||||
|
||||
// find modified submodules and new submodules -- build them
|
||||
dir := pr.Head.Sha
|
||||
headSubmodules := h.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
baseSubmodules := h.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
headSubmodules, err := git.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
if err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
baseSubmodules, err := git.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
if err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
|
||||
modifiedOrNew := make([]string, 0, 16)
|
||||
for pkg, headOid := range headSubmodules {
|
||||
@@ -446,20 +491,20 @@ func processPullNotification(h *common.RequestHandler, thread *models.Notificati
|
||||
}
|
||||
}
|
||||
|
||||
h.Log("repo content fetching ...")
|
||||
refPrj := string(bytes.TrimSpace(h.GitCatFile(dir, pr.Head.Sha, "project.build")))
|
||||
log.Println("repo content fetching ...")
|
||||
refPrjData, err := git.GitCatFile(dir, pr.Head.Sha, "project.build")
|
||||
if err != nil {
|
||||
log.Panicln(err)
|
||||
}
|
||||
refPrj := string(bytes.TrimSpace(refPrjData))
|
||||
|
||||
if len(refPrj) < 1 {
|
||||
_, err := h.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
h.LogError("Cannot find reference project for %s PR#%d", pr.Base.Name, pr.Index)
|
||||
return
|
||||
}
|
||||
if h.HasError() {
|
||||
h.LogPlainError(h.Error)
|
||||
log.Printf("Cannot find reference project for %s PR#%d\n", pr.Base.Name, pr.Index)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -468,83 +513,116 @@ func processPullNotification(h *common.RequestHandler, thread *models.Notificati
|
||||
if err != nil {
|
||||
if errors.Is(err, common.ObsProjectNotFound{Project: obsProject}) {
|
||||
// recreate missing project
|
||||
h.LogError("missing OBS project ... recreating '%s': %v", obsProject, err)
|
||||
startOrUpdateBuild(h, pr, obsClient)
|
||||
|
||||
log.Printf("missing OBS project ... recreating '%s': %v\n", obsProject, err)
|
||||
startOrUpdateBuild(git, gitea, pr, obsClient)
|
||||
return
|
||||
}
|
||||
h.LogError("failed fetching build status for '%s': %v", obsProject, err)
|
||||
log.Printf("failed fetching build status for '%s': %v\n", obsProject, err)
|
||||
return
|
||||
}
|
||||
|
||||
refProjectResult, err := obsClient.BuildStatus(refPrj, prjResult.GetPackageList()...)
|
||||
if err != nil {
|
||||
h.LogError("failed fetching ref project status for '%s': %v", refPrj, err)
|
||||
log.Printf("failed fetching ref project status for '%s': %v\n", refPrj, err)
|
||||
}
|
||||
buildStatus := processBuildStatus(h, prjResult, refProjectResult)
|
||||
buildStatus := processBuildStatus(prjResult, refProjectResult)
|
||||
|
||||
switch buildStatus {
|
||||
case BuildStatusSummarySuccess:
|
||||
_, err := h.AddReviewComment(pr, common.ReviewStateApproved, "Build successful")
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, "Build successful")
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
log.Println(err)
|
||||
}
|
||||
case BuildStatusSummaryFailed:
|
||||
_, err := h.AddReviewComment(pr, common.ReviewStateRequestChanges, "Build failed")
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Build failed")
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
log.Println(err)
|
||||
}
|
||||
}
|
||||
h.Log("Build status waiting: %d", buildStatus)
|
||||
log.Println("Build status waiting:", buildStatus)
|
||||
// waiting for build results -- nothing to do
|
||||
|
||||
case common.ReviewStateApproved:
|
||||
// done, mark notification as read
|
||||
h.Log("processing request for success build ...")
|
||||
h.SetNotificationRead(thread.ID)
|
||||
log.Println("processing request for success build ...")
|
||||
gitea.SetNotificationRead(thread.ID)
|
||||
|
||||
case common.ReviewStateRequestChanges:
|
||||
// build failures, nothing to do here, mark notification as read
|
||||
h.Log("processing request for failed request changes...")
|
||||
h.SetNotificationRead(thread.ID)
|
||||
log.Println("processing request for failed request changes...")
|
||||
gitea.SetNotificationRead(thread.ID)
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
func pollWorkNotifications() {
|
||||
h := common.CreateRequestHandler(GitAuthor, BotName)
|
||||
data, err := h.GetPullNotifications(nil)
|
||||
func pollWorkNotifications(giteaHost string) {
|
||||
gitea := common.AllocateGiteaTransport(giteaHost)
|
||||
data, err := gitea.GetPullNotifications(nil)
|
||||
|
||||
if err != nil {
|
||||
h.LogPlainError(err)
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
if data != nil {
|
||||
log.Println("Processing", len(data), "notifications.")
|
||||
for _, notification := range data {
|
||||
switch notification.Subject.Type {
|
||||
case "Pull":
|
||||
processPullNotification(h, notification)
|
||||
default:
|
||||
h.SetNotificationRead(notification.ID)
|
||||
log.Println(notification.ID, "--", notification.Subject)
|
||||
|
||||
if !ListPullNotificationsOnly && (ProcessIDOnly < 0 || ProcessIDOnly == notification.ID) {
|
||||
switch notification.Subject.Type {
|
||||
case "Pull":
|
||||
processPullNotification(gitea, notification)
|
||||
default:
|
||||
gitea.SetNotificationRead(notification.ID)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var ListPullNotificationsOnly bool
|
||||
var ProcessIDOnly int64
|
||||
var Debug bool
|
||||
var BuildRoot string
|
||||
var obsApiHost string
|
||||
var obsWebHost string
|
||||
|
||||
func ObsWebHostFromApiHost(apihost string) string {
|
||||
if len(apihost) > 4 && apihost[0:4] == "api." {
|
||||
return "build" + apihost[3:]
|
||||
}
|
||||
|
||||
return apihost
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.BoolVar(&Debug, "debug", false, "One-shot run. Use for debugging")
|
||||
flag.BoolVar(&ListPullNotificationsOnly, "list-notifications-only", false, "Only lists notifications without acting on them")
|
||||
flag.Int64Var(&ProcessIDOnly, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
|
||||
flag.StringVar(&BuildRoot, "build-root", "", "Default build location for staging projects. Default is bot's home project")
|
||||
giteaHost := flag.String("gitea", "src.opensuse.org", "Gitea instance")
|
||||
flag.StringVar(&obsApiHost, "obs", "api.opensuse.org", "API for OBS instance")
|
||||
flag.StringVar(&obsWebHost, "obs-web", "", "Web OBS instance, if not derived from the obs config")
|
||||
flag.Parse()
|
||||
|
||||
if len(obsWebHost) == 0 {
|
||||
obsWebHost = ObsWebHostFromApiHost(obsApiHost)
|
||||
}
|
||||
|
||||
failOnError(common.RequireGiteaSecretToken(), "Cannot find GITEA_TOKEN")
|
||||
failOnError(common.RequireObsSecretToken(), "Cannot find OBS_USER and OBS_PASSWORD")
|
||||
|
||||
// go ProcessingObsMessages("rabbit.opensuse.org", "opensuse", "opensuse", "")
|
||||
|
||||
for {
|
||||
pollWorkNotifications()
|
||||
pollWorkNotifications(*giteaHost)
|
||||
if Debug {
|
||||
break
|
||||
}
|
||||
log.Println("Poll cycle finished")
|
||||
time.Sleep(10 * time.Minute)
|
||||
}
|
||||
|
||||
stuck := make(chan int)
|
||||
<-stuck
|
||||
}
|
||||
|
@@ -1,38 +1,95 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
func TestPRtoObsProjectMapping(t *testing.T) {
|
||||
pr := models.PullRequest{
|
||||
Base: &models.PRBranchInfo {
|
||||
Repo: &models.Repository {
|
||||
Name: "Repo",
|
||||
Owner: &models.User {
|
||||
UserName: "foobar",
|
||||
},
|
||||
},
|
||||
func TestObsAPIHostFromWebHost(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
api string
|
||||
web string
|
||||
}{
|
||||
{
|
||||
name: "api host",
|
||||
api: "api.suse.de",
|
||||
web: "build.suse.de",
|
||||
},
|
||||
{
|
||||
name: "api host",
|
||||
api: "api.opensuse.org",
|
||||
web: "build.opensuse.org",
|
||||
},
|
||||
{
|
||||
name: "other host",
|
||||
api: "someapi.suse.de",
|
||||
web: "someapi.suse.de",
|
||||
},
|
||||
{
|
||||
name: "short host",
|
||||
api: "s",
|
||||
web: "s",
|
||||
},
|
||||
Index: 10,
|
||||
}
|
||||
p := getObsProjectAssociatedWithPr("home:foo", &pr)
|
||||
if p != "home:foo:foobar:Repo:PR:10" {
|
||||
t.Errorf("invalid project: %s", p)
|
||||
}
|
||||
|
||||
pr.Base.Repo.Name = "_FooBar"
|
||||
p = getObsProjectAssociatedWithPr("home:foo", &pr)
|
||||
if p != "home:foo:foobar:XFooBar:PR:10" {
|
||||
t.Errorf("invalid project: %s", p)
|
||||
}
|
||||
|
||||
pr.Base.Repo.Owner.UserName = "_some_thing"
|
||||
p = getObsProjectAssociatedWithPr("home:foo", &pr)
|
||||
if p != "home:foo:Xsome_thing:XFooBar:PR:10" {
|
||||
t.Errorf("invalid project: %s", p)
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
if r := ObsWebHostFromApiHost(test.api); r != test.web {
|
||||
t.Error("Expected:", test.web, "but observed", r)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRtoObsProjectMapping(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
pr string // org/repo/prNo
|
||||
|
||||
expectedProject string
|
||||
}{
|
||||
{
|
||||
name: "Regular project",
|
||||
pr: "foobar/Repo/10",
|
||||
expectedProject: "home:foo:foobar:Repo:PR:10",
|
||||
},
|
||||
{
|
||||
name: "underscore repo name",
|
||||
pr: "foobar/_FooBar/10",
|
||||
expectedProject: "home:foo:foobar:XFooBar:PR:10",
|
||||
},
|
||||
{
|
||||
name: "Underscore repo and project",
|
||||
pr: "_some_thing/_FooBar/11",
|
||||
expectedProject: "home:foo:Xsome_thing:XFooBar:PR:11",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
s := strings.Split(test.pr, "/")
|
||||
n, _ := strconv.ParseInt(s[2], 10, 64)
|
||||
|
||||
pr := models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Repo: &models.Repository{
|
||||
Name: s[1],
|
||||
Owner: &models.User{
|
||||
UserName: s[0],
|
||||
},
|
||||
},
|
||||
},
|
||||
Index: n,
|
||||
}
|
||||
|
||||
p := getObsProjectAssociatedWithPr("home:foo", &pr)
|
||||
if p != test.expectedProject {
|
||||
t.Error("invalid project:", p, "Expected:", test.expectedProject)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
1
pr-review/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
pr-review
|
@@ -1,21 +0,0 @@
|
||||
PR Review bot
|
||||
=============
|
||||
|
||||
Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
|
||||
|
||||
Areas of Responsibility
|
||||
-----------------------
|
||||
|
||||
* Detects a PackageGit PR creation against a package and creates a corresponding PR against the ProjectGit
|
||||
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated
|
||||
* Stores reference to the PackageGit PR in the headers of the ProjectGit PR comments, for later reference
|
||||
* this allows the ProjectGit PR to be merged or separated later (via another tool, for example)
|
||||
* Initiates all staging workflows via review requests
|
||||
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Any project (devel, etc) that accepts PR
|
||||
|
@@ -1,216 +0,0 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
const (
|
||||
ListenAddr = "[::1]:8001"
|
||||
|
||||
GitAuthor = "GiteaBot - AutoStaging"
|
||||
PrReview = "pr-review"
|
||||
)
|
||||
|
||||
func fetchPrGit(h *common.RequestHandler, pr *models.PullRequest) error {
|
||||
// clone PR head and base and return path
|
||||
if h.HasError() {
|
||||
return h.Error
|
||||
}
|
||||
if _, err := os.Stat(path.Join(h.GitPath, pr.Head.Sha)); os.IsNotExist(err) {
|
||||
h.GitExec("", "clone", "--depth", "1", pr.Head.Repo.CloneURL, pr.Head.Sha)
|
||||
h.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.Base.Sha)
|
||||
} else if err != nil {
|
||||
h.Error = err
|
||||
}
|
||||
|
||||
return h.Error
|
||||
}
|
||||
|
||||
func processPullRequestClosed(h *common.RequestHandler) error {
|
||||
// this needs to be moved to pull merger
|
||||
return nil
|
||||
/*
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
if req.Repository.Name != common.DefaultGitPrj {
|
||||
// we only handle project git PR updates here
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := fetchPrGit(h, req.Pull_Request); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
headSubmodules := h.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
baseSubmodules := h.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
return nil
|
||||
*/
|
||||
}
|
||||
|
||||
func processPrjGitPullRequestSync(h *common.RequestHandler) error {
|
||||
// req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func prGitBranchNameForPR(req *common.PullRequestAction) string {
|
||||
return fmt.Sprintf("PR_%s#%d", req.Repository.Name, req.Pull_Request.Number)
|
||||
}
|
||||
|
||||
func updateOrCreatePRBranch(h *common.RequestHandler, commitMsg, branchName string) {
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
h.GitExec(common.DefaultGitPrj, "submodule", "update", "--init", "--checkout", "--depth", "1", req.Repository.Name)
|
||||
h.GitExec(path.Join(common.DefaultGitPrj, req.Repository.Name), "fetch", "--depth", "1", "origin", req.Pull_Request.Head.Sha)
|
||||
h.GitExec(path.Join(common.DefaultGitPrj, req.Repository.Name), "checkout", req.Pull_Request.Head.Sha)
|
||||
h.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", commitMsg)
|
||||
h.GitExec(common.DefaultGitPrj, "push", "-f", "origin", branchName)
|
||||
}
|
||||
|
||||
func processPullRequestSync(h *common.RequestHandler) error {
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
if req.Repository.Name == common.DefaultGitPrj {
|
||||
return processPrjGitPullRequestSync(h)
|
||||
}
|
||||
|
||||
// need to verify that submodule in the PR for prjgit
|
||||
// is still pointing to the HEAD of the PR
|
||||
prjPr := h.GetAssociatedPrjGitPR(req)
|
||||
|
||||
if h.HasError() {
|
||||
h.LogError("%v", h.Error)
|
||||
return h.Error
|
||||
}
|
||||
h.Log("associated pr: %v", prjPr)
|
||||
|
||||
h.GitExec("", "clone", "--branch", prjPr.Head.Name, "--depth", "1", prjPr.Head.Repo.SSHURL, common.DefaultGitPrj)
|
||||
commitId, ok := h.GitSubmoduleCommitId(common.DefaultGitPrj, req.Repository.Name, prjPr.Head.Sha)
|
||||
|
||||
if !ok {
|
||||
return fmt.Errorf("Cannot fetch submodule commit id in prjgit for '%s'", req.Repository.Name)
|
||||
}
|
||||
|
||||
// nothing changed, still in sync
|
||||
if commitId == req.Pull_Request.Head.Sha {
|
||||
h.Log("commitID already match - nothing to do")
|
||||
return nil
|
||||
}
|
||||
|
||||
h.Log("different ids: '%s' vs. '%s'", req.Pull_Request.Head.Sha, commitId)
|
||||
|
||||
commitMsg := fmt.Sprintf(`Sync PR
|
||||
|
||||
Update to %s`, req.Pull_Request.Head.Sha)
|
||||
|
||||
h.Log("will create new commit msg: %s", commitMsg)
|
||||
h.Log("error? %v", h.Error)
|
||||
|
||||
// we need to update prjgit PR with the new head hash
|
||||
branchName := prGitBranchNameForPR(req)
|
||||
updateOrCreatePRBranch(h, commitMsg, branchName)
|
||||
|
||||
return h.Error
|
||||
}
|
||||
|
||||
func processPullRequestOpened(h *common.RequestHandler) error {
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
// requests against project are not handled here
|
||||
if req.Repository.Name == common.DefaultGitPrj {
|
||||
return nil
|
||||
}
|
||||
|
||||
// create PrjGit branch for building the pull request
|
||||
branchName := prGitBranchNameForPR(req)
|
||||
commitMsg := fmt.Sprintf(`auto-created for %s
|
||||
|
||||
This commit was autocreated by %s
|
||||
referencing
|
||||
|
||||
PullRequest: %s/%s#%d`, req.Repository.Owner.Username,
|
||||
req.Repository.Name, GitAuthor, req.Repository.Name, req.Pull_Request.Number)
|
||||
|
||||
prjGit := h.CreateRepositoryIfNotExist(*req.Repository.Owner, common.DefaultGitPrj)
|
||||
if h.HasError() {
|
||||
return h.Error
|
||||
}
|
||||
|
||||
h.GitExec("", "clone", "--depth", "1", prjGit.SSHURL, common.DefaultGitPrj)
|
||||
h.GitExec(common.DefaultGitPrj, "checkout", "-B", branchName, prjGit.DefaultBranch)
|
||||
updateOrCreatePRBranch(h, commitMsg, branchName)
|
||||
|
||||
PR := h.CreatePullRequest(prjGit, branchName, prjGit.DefaultBranch,
|
||||
fmt.Sprintf("Forwarded PR: %s", req.Repository.Name),
|
||||
fmt.Sprintf(`This is a forwarded pull request by %s
|
||||
referencing the following pull request:
|
||||
|
||||
`+common.PrPattern,
|
||||
GitAuthor, req.Repository.Owner.Username, req.Repository.Name, req.Pull_Request.Number),
|
||||
)
|
||||
|
||||
if h.HasError() {
|
||||
return h.Error
|
||||
}
|
||||
|
||||
// request build review
|
||||
h.RequestReviews(PR, common.Bot_BuildReview)
|
||||
|
||||
return h.Error
|
||||
}
|
||||
|
||||
func processPullRequest(h *common.RequestHandler) error {
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
switch req.Action {
|
||||
case "opened", "reopened":
|
||||
return processPullRequestOpened(h)
|
||||
case "synchronized":
|
||||
return processPullRequestSync(h)
|
||||
case "edited":
|
||||
// not need to be handled??
|
||||
return nil
|
||||
case "closed":
|
||||
return processPullRequestClosed(h)
|
||||
}
|
||||
|
||||
return fmt.Errorf("Unhandled pull request action: %s", req.Action)
|
||||
}
|
||||
|
||||
func main() {
|
||||
var defs common.ListenDefinitions
|
||||
|
||||
defs.Url = PrReview
|
||||
defs.GitAuthor = GitAuthor
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_PR] = processPullRequest
|
||||
defs.Handlers[common.RequestType_PR_sync] = processPullRequest
|
||||
|
||||
common.RequireGiteaSecretToken()
|
||||
common.RequireObsSecretToken()
|
||||
common.StartServerWithAddress(defs, ListenAddr)
|
||||
}
|
1
prjgit-updater/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
prjgit-updater
|
@@ -1,459 +0,0 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"math/rand"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
const AppName = "direct_workflow"
|
||||
const GitAuthor = "AutoGits prjgit-updater"
|
||||
const GitEmail = "adam+autogits-direct@zombino.com"
|
||||
|
||||
var configuredRepos map[string][]*common.AutogitConfig
|
||||
var gitea *common.GiteaTransport
|
||||
|
||||
func isConfiguredOrg(org *common.Organization) bool {
|
||||
_, found := configuredRepos[org.Username]
|
||||
return found
|
||||
}
|
||||
|
||||
func concatenateErrors(err1, err2 error) error {
|
||||
if err1 == nil {
|
||||
return err2
|
||||
}
|
||||
|
||||
if err2 == nil {
|
||||
return err1
|
||||
}
|
||||
|
||||
return fmt.Errorf("%w\n%w", err1, err2)
|
||||
}
|
||||
|
||||
func processRepositoryAction(h *common.RequestHandler) error {
|
||||
action := h.Request.Data.(*common.RepositoryWebhookEvent)
|
||||
configs, configFound := configuredRepos[action.Organization.Username]
|
||||
|
||||
if !configFound {
|
||||
h.StdLogger.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Organization.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if config.GitProjectName == action.Repository.Name {
|
||||
h.StdLogger.Println("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
err = concatenateErrors(err, processConfiguredRepositoryAction(h, action, config))
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
func processConfiguredRepositoryAction(h *common.RequestHandler, action *common.RepositoryWebhookEvent, config *common.AutogitConfig) error {
|
||||
prjgit := config.GitProjectName
|
||||
git, err := common.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// defer git.Close()
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, *action.Organization, prjgit)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s err: %w", prjgit, err)
|
||||
}
|
||||
|
||||
if err := git.GitExec("", "clone", "--depth", "1", prjGitRepo.SSHURL, common.DefaultGitPrj); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch action.Action {
|
||||
case "created":
|
||||
if err := git.GitExec(common.DefaultGitPrj, "submodule", "--quiet", "add", "--depth", "1", action.Repository.Clone_Url); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "commit", "-m", "Automatic package inclusion via Direct Workflow"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "push"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case "deleted":
|
||||
if stat, err := os.Stat(filepath.Join(git.GitPath, common.DefaultGitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if git.DebugLogger {
|
||||
h.StdLogger.Printf("delete event for %s -- not in project. Ignoring\n", action.Repository.Name)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "rm", action.Repository.Name); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "commit", "-m", "Automatic package removal via Direct Workflow"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "push"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
default:
|
||||
return fmt.Errorf("%s: %s", "Unknown action type", action.Action)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processPushAction(h *common.RequestHandler) error {
|
||||
action := h.Request.Data.(*common.PushWebhookEvent)
|
||||
configs, configFound := configuredRepos[action.Repository.Owner.Username]
|
||||
|
||||
if !configFound {
|
||||
h.StdLogger.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Repository.Owner.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if config.GitProjectName == action.Repository.Name {
|
||||
h.StdLogger.Println("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
err = concatenateErrors(err, processConfiguredPushAction(h, action, config))
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredPushAction(h *common.RequestHandler, action *common.PushWebhookEvent, config *common.AutogitConfig) error {
|
||||
prjgit := config.GitProjectName
|
||||
git, err := common.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer git.Close()
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, *action.Repository.Owner, prjgit)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s err: %w", prjgit, err)
|
||||
}
|
||||
|
||||
if err := git.GitExec("", "clone", "--depth", "1", prjGitRepo.SSHURL, common.DefaultGitPrj); err != nil {
|
||||
return err
|
||||
}
|
||||
if stat, err := os.Stat(filepath.Join(git.GitPath, common.DefaultGitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if git.DebugLogger {
|
||||
h.StdLogger.Printf("Pushed to package that is not part of the project. Ignoring: %v\n", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "submodule", "update", "--init", "--depth", "1", "--checkout", action.Repository.Name); err != nil {
|
||||
return err
|
||||
}
|
||||
id, err := git.GitBranchHead(filepath.Join(common.DefaultGitPrj, action.Repository.Name), action.Repository.Default_Branch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, commitId := range action.Commits {
|
||||
if commitId.Id == id {
|
||||
if err := git.GitExec(filepath.Join(common.DefaultGitPrj, action.Repository.Name), "fetch", "--depth", "1", "origin", id); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(filepath.Join(common.DefaultGitPrj, action.Repository.Name), "checkout", id); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", "Automatic update via push via Direct Workflow"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(common.DefaultGitPrj, "push"); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
h.StdLogger.Println("push of refs not on the main branch. ignoring.")
|
||||
return nil
|
||||
}
|
||||
|
||||
func verifyProjectState(git *common.GitHandler, orgName string, config *common.AutogitConfig, configs []*common.AutogitConfig) error {
|
||||
org := common.Organization{
|
||||
Username: orgName,
|
||||
}
|
||||
repo, err := gitea.CreateRepositoryIfNotExist(git, org, config.GitProjectName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching or creating '%s/%s' -- aborting verifyProjectState(). Err: %w", orgName, config.GitProjectName, err)
|
||||
}
|
||||
|
||||
if err := git.GitExec("", "clone", "--depth", "1", repo.SSHURL, config.GitProjectName); err != nil {
|
||||
return fmt.Errorf("Error closing projectgit for %s, Err: %w", config.GitProjectName, err)
|
||||
}
|
||||
|
||||
log.Println("getting submodule list")
|
||||
sub, err := git.GitSubmoduleList(config.GitProjectName, "HEAD")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodule list... Err: %w", err)
|
||||
}
|
||||
|
||||
isGitUpdated := false
|
||||
next_package:
|
||||
for filename, commitId := range sub {
|
||||
// ignore project gits
|
||||
for _, c := range configs {
|
||||
if c.GitProjectName == filename {
|
||||
log.Println(" prjgit as package? ignoring project git:", filename)
|
||||
continue next_package
|
||||
}
|
||||
}
|
||||
|
||||
log.Println(" verifying package:", filename, commitId, config.Branch)
|
||||
commits, err := gitea.GetRecentCommits(orgName, filename, config.Branch, 10)
|
||||
if err != nil {
|
||||
// assumption that package does not exist, remove from project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
if err := git.GitExec(config.GitProjectName, "rm", filename); err != nil {
|
||||
return fmt.Errorf("Failed to remove deleted submodule. Err: %w", err)
|
||||
}
|
||||
isGitUpdated = true
|
||||
continue
|
||||
}
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("Failed to fetch recent commits for package: '%s'. Err: %w", filename, err)
|
||||
// }
|
||||
|
||||
idx := 1000
|
||||
for i, c := range commits {
|
||||
if c.SHA == commitId {
|
||||
idx = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if idx == 0 {
|
||||
// up-to-date
|
||||
continue
|
||||
} else if idx < len(commits) { // update
|
||||
if err := git.GitExec(config.GitProjectName, "submodule", "update", "--init", "--depth", "1", "--checkout", filename); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(filepath.Join(config.GitProjectName, filename), "fetch", "--depth", "1", "origin", commits[0].SHA); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(filepath.Join(config.GitProjectName, filename), "checkout", commits[0].SHA); err != nil {
|
||||
return err
|
||||
}
|
||||
isGitUpdated = true
|
||||
} else {
|
||||
// probably need `merge-base` or `rev-list` here instead, or the project updated already
|
||||
return fmt.Errorf("Cannot find SHA of last matching update for package: '%s'. idx: %d", filename, idx)
|
||||
}
|
||||
}
|
||||
|
||||
// find all missing repositories, and add them
|
||||
if debugMode {
|
||||
log.Println("checking for missing repositories...")
|
||||
}
|
||||
repos, err := gitea.GetOrganizationRepositories(orgName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if debugMode {
|
||||
log.Println(" nRepos:", len(repos))
|
||||
}
|
||||
|
||||
next_repo:
|
||||
for _, r := range repos {
|
||||
if debugMode {
|
||||
log.Println(" -- checking", r.Name)
|
||||
}
|
||||
|
||||
for _, c := range configs {
|
||||
if c.Organization == orgName && c.GitProjectName == r.Name {
|
||||
// ignore project gits
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
for repo := range sub {
|
||||
if repo == r.Name {
|
||||
continue next_repo
|
||||
}
|
||||
}
|
||||
|
||||
if debugMode {
|
||||
log.Println(" -- checking repository:", r.Name)
|
||||
}
|
||||
|
||||
if _, err := gitea.GetRecentCommits(orgName, r.Name, config.Branch, 10); err != nil {
|
||||
// assumption that package does not exist, so not part of project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
break
|
||||
}
|
||||
|
||||
// add repository to git project
|
||||
if err := git.GitExec(config.GitProjectName, "submodule", "--quiet", "add", "--depth", "1", r.SSHURL); err != nil {
|
||||
return fmt.Errorf("Cannot add submodule '%s' to project '%s'. Err: %w", r.Name, config.GitProjectName, err)
|
||||
}
|
||||
|
||||
if len(config.Branch) > 0 {
|
||||
if err := git.GitExec(path.Join(config.GitProjectName, r.Name), "fetch", "--depth", "1", "origin", config.Branch); err != nil {
|
||||
return fmt.Errorf("Failed to fetch branch '%s' from '%s'/'%s'. Err: %w", config.Branch, orgName, r.Name, err)
|
||||
}
|
||||
if err := git.GitExec(path.Join(config.GitProjectName, r.Name), "checkout", config.Branch); err != nil {
|
||||
return fmt.Errorf("Failed to checkout fetched branch '%s' from '%s'/'%s'. Err: %w", config.Branch, orgName, r.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
isGitUpdated = true
|
||||
}
|
||||
|
||||
if isGitUpdated {
|
||||
if err := git.GitExec(config.GitProjectName, "commit", "-a", "-m", "Automatic update via push via Direct Workflow -- SYNC"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := git.GitExec(config.GitProjectName, "push"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if debugMode {
|
||||
log.Println("Verification finished for ", orgName, ", config", config.GitProjectName)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var checkOnStart bool
|
||||
var checkInterval time.Duration
|
||||
|
||||
func consistencyCheckProcess() {
|
||||
if checkOnStart {
|
||||
log.Println("== Startup consistency check begin...")
|
||||
for org, configs := range configuredRepos {
|
||||
for _, config := range configs {
|
||||
log.Println(" - org: ", org, " - config: ", config.GitProjectName)
|
||||
git, err := common.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
log.Println("Failed to allocate GitHandler:", err)
|
||||
return
|
||||
}
|
||||
if err := verifyProjectState(git, org, config, configs); err != nil {
|
||||
log.Println("Failed to verify state of org:", org, err)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
log.Println("== Startup consistency check done...")
|
||||
}
|
||||
|
||||
for org, configs := range configuredRepos {
|
||||
for _, config := range configs {
|
||||
sleepInterval := checkInterval - checkInterval/2 + time.Duration(rand.Int63n(int64(checkInterval)))
|
||||
log.Println(" - sleep interval", sleepInterval)
|
||||
time.Sleep(sleepInterval)
|
||||
|
||||
log.Printf(" ++ starting verification, org: `%s`\n", org)
|
||||
git, err := common.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
log.Println("Faield to allocate GitHandler:", err)
|
||||
return
|
||||
}
|
||||
if err := verifyProjectState(git, org, config, configs); err != nil {
|
||||
log.Printf(" *** verification failed, org: `%s`, err: %#v\n", org, err)
|
||||
}
|
||||
log.Printf(" ++ verification complete, org: `%s`\n", org)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var debugMode bool
|
||||
|
||||
func main() {
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
workflowConfig := flag.String("config", "", "Repository and workflow definition file")
|
||||
giteaHost := flag.String("gitea", "src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
flag.BoolVar(&debugMode, "debug", false, "Extra debugging information")
|
||||
flag.BoolVar(&checkOnStart, "check-on-start", false, "Check all repositories for consistency on start, without delays")
|
||||
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consistency, in hours")
|
||||
flag.Parse()
|
||||
|
||||
checkInterval = time.Duration(*checkIntervalHours) * time.Hour
|
||||
|
||||
if len(*workflowConfig) == 0 {
|
||||
log.Fatalln("No configuratio file specified. Aborting")
|
||||
}
|
||||
|
||||
configs, err := common.ReadWorkflowConfigsFile(*workflowConfig)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
configuredRepos = make(map[string][]*common.AutogitConfig)
|
||||
orgs := make([]string, 0, 1)
|
||||
for _, c := range configs {
|
||||
if slices.Contains(c.Workflows, "direct") {
|
||||
if debugMode {
|
||||
log.Printf(" + adding org: '%s', branch: '%s', prjgit: '%s'\n", c.Organization, c.Branch, c.GitProjectName)
|
||||
}
|
||||
configs := configuredRepos[c.Organization]
|
||||
if configs == nil {
|
||||
configs = make([]*common.AutogitConfig, 0, 1)
|
||||
}
|
||||
configs = append(configs, c)
|
||||
configuredRepos[c.Organization] = configs
|
||||
|
||||
orgs = append(orgs, c.Organization)
|
||||
}
|
||||
}
|
||||
|
||||
gitea = common.AllocateGiteaTransport(*giteaHost)
|
||||
go consistencyCheckProcess()
|
||||
|
||||
var defs common.ListenDefinitions
|
||||
|
||||
defs.GitAuthor = GitAuthor
|
||||
defs.RabbitURL = *rabbitUrl
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_Push] = processPushAction
|
||||
defs.Handlers[common.RequestType_Repository] = processRepositoryAction
|
||||
|
||||
log.Fatal(common.ProcessRabbitMQEvents(defs, orgs))
|
||||
}
|
@@ -1,4 +0,0 @@
|
||||
package main
|
||||
|
||||
// TODO, like documentation :-)
|
||||
|
@@ -1,5 +1,6 @@
|
||||
[Unit]
|
||||
Description=Gitea webhook to RabbitMQ message publisher
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
@@ -10,6 +11,5 @@ NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
|
||||
[Install]
|
||||
After=network-online.target
|
||||
WantedBy=multi-user.target
|
||||
|
||||
|
BIN
vendor-gitea-events-rabbitmq-publisher.tar.zst
(Stored with Git LFS)
Normal file
Binary file not shown.
1
workflow-direct/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
workflow-direct
|
@@ -1,5 +1,5 @@
|
||||
Project Git bot
|
||||
===============
|
||||
Direct Workflow bot
|
||||
===================
|
||||
|
||||
Areas of responsibility
|
||||
-----------------------
|
||||
@@ -11,8 +11,9 @@ Areas of responsibility
|
||||
* on package removal, removes the submodule
|
||||
|
||||
2. Assumes:
|
||||
* config.GitProjectName == project name (default: _ObsPrj)
|
||||
* config.GitProjectName == project name (default: `_ObsPrj`)
|
||||
* Other repositories == packages (similar to OBS project)
|
||||
* config.Branch == "" => default branch from Gitea
|
||||
|
||||
|
||||
Target Usage
|
6
workflow-direct/example.json
Normal file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
"autogits/MyPrj",
|
||||
"autogits/HiddenPrj",
|
||||
"testing"
|
||||
]
|
||||
|
41
workflow-direct/go.mod
Normal file
@@ -0,0 +1,41 @@
|
||||
module src.opensuse.org/autogits/workflow-direct
|
||||
|
||||
go 1.23.1
|
||||
|
||||
toolchain go1.24.0
|
||||
|
||||
replace src.opensuse.org/autogits/common => ../bots-common
|
||||
|
||||
require (
|
||||
go.uber.org/mock v0.5.0
|
||||
src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/go-logr/logr v1.4.1 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-openapi/analysis v0.23.0 // indirect
|
||||
github.com/go-openapi/errors v0.22.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/loads v0.22.0 // indirect
|
||||
github.com/go-openapi/runtime v0.28.0 // indirect
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/strfmt v0.23.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.0 // indirect
|
||||
github.com/go-openapi/validate v0.24.0 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/opentracing/opentracing-go v1.2.0 // indirect
|
||||
github.com/rabbitmq/amqp091-go v1.10.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.14.0 // indirect
|
||||
go.opentelemetry.io/otel v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.24.0 // indirect
|
||||
golang.org/x/sync v0.7.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
@@ -68,6 +68,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
95
workflow-direct/links.go
Normal file
@@ -0,0 +1,95 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type PackageRebaseLink struct {
|
||||
Pkg string // fork to track
|
||||
SelfRef bool // if we reference different branch as upstream, instead of parentRepo
|
||||
SourceBranch string // branch to follow, empty for default
|
||||
|
||||
parentRepo *models.Repository
|
||||
}
|
||||
|
||||
func fetchProjectLinksFile(org, prj string) ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
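// parseProjectLinks decodes the project links file: a JSON array of objects
// with "Pkg", "SelfRef" and "SourceBranch" fields. Entries are returned
// sorted by Pkg; empty input yields an empty slice.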
func parseProjectLinks(data []byte) ([]*PackageRebaseLink, error) {
|
||||
values := make([]*PackageRebaseLink, 0, 100)
|
||||
|
||||
if len(data) == 0 {
|
||||
return values, nil
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(data, &values); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
slices.SortFunc(values, func(a, b *PackageRebaseLink) int {
|
||||
return strings.Compare(a.Pkg, b.Pkg)
|
||||
})
|
||||
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func ProcessProjectLinks(gitea common.Gitea, org, prjGit, branch string) ([]*PackageRebaseLink, string, error) {
|
||||
data, hash, err := gitea.GetRepositoryFileContent(org, prjGit, branch, common.PrjLinksFile)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
links, err := parseProjectLinks(data)
|
||||
return links, hash, err
|
||||
}
|
||||
|
||||
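// ResolveLinks fills in each link's parent repository via Gitea: the repository
// itself when SelfRef is set, otherwise its fork parent. An empty SourceBranch
// defaults to the repository's default branch.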
func ResolveLinks(org string, links []*PackageRebaseLink, gitea common.GiteaRepoFetcher) {
|
||||
for _, link := range links {
|
||||
if repo, err := gitea.GetRepository(org, link.Pkg); err == nil {
|
||||
if link.SelfRef {
|
||||
link.parentRepo = repo
|
||||
} else {
|
||||
link.parentRepo = repo.Parent
|
||||
}
|
||||
|
||||
if len(link.SourceBranch) == 0 {
|
||||
link.SourceBranch = repo.DefaultBranch
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func ListenOrgsForUpdates(links []*PackageRebaseLink) []string {
|
||||
orgs := make([]string, 0, len(links))
|
||||
for _, link := range links {
|
||||
if !slices.Contains(orgs, link.parentRepo.Owner.UserName) {
|
||||
orgs = append(orgs, link.parentRepo.Owner.UserName)
|
||||
}
|
||||
}
|
||||
|
||||
return orgs
|
||||
}
|
||||
|
||||
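// StateUpdater rebases the checkout in pkgSubmoduleDir onto the linked parent
// repository: it adds a "link" remote if missing, fetches SourceBranch and
// rebases onto it; a failed rebase is aborted and its error returned.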
func (link *PackageRebaseLink) StateUpdater(git common.Git, gitea common.Gitea, pkgSubmoduleDir string) error {
|
||||
if link.parentRepo == nil {
|
||||
return fmt.Errorf(" *** Can't update link in '%s' -- no parent repository", pkgSubmoduleDir)
|
||||
}
|
||||
|
||||
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgSubmoduleDir, "remote"), "\n")
|
||||
if !slices.Contains(remotes, "link") {
|
||||
git.GitExecWithOutputOrPanic(pkgSubmoduleDir, "remote", "add", "link", link.parentRepo.SSHURL)
|
||||
}
|
||||
git.GitExecOrPanic(pkgSubmoduleDir, "fetch", "link", link.SourceBranch)
|
||||
if err := git.GitExec(pkgSubmoduleDir, "rebase", "link", link.SourceBranch); err != nil {
|
||||
git.GitExec(pkgSubmoduleDir, "rebase", "--abort")
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
120
workflow-direct/links_test.go
Normal file
@@ -0,0 +1,120 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestLinkParsing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data []byte
|
||||
links []*PackageRebaseLink
|
||||
expected_err string
|
||||
}{
|
||||
{
|
||||
name: "No links file",
|
||||
},
|
||||
{
|
||||
name: "Empty links file",
|
||||
data: []byte("[]"),
|
||||
},
|
||||
{
|
||||
name: "Single package linked",
|
||||
data: []byte(`[{"Pkg": "foo"}]`),
|
||||
links: []*PackageRebaseLink{
|
||||
&PackageRebaseLink{
|
||||
Pkg: "foo",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Multiple packages linked, resorted",
|
||||
data: []byte(`[{"Pkg": "foo"}, {"Pkg": "abc"}, {"Pkg": "aaa", "SourceBranch": "q"}]`),
|
||||
links: []*PackageRebaseLink{
|
||||
&PackageRebaseLink{
|
||||
Pkg: "aaa",
|
||||
SourceBranch: "q",
|
||||
},
|
||||
&PackageRebaseLink{
|
||||
Pkg: "abc",
|
||||
},
|
||||
&PackageRebaseLink{
|
||||
Pkg: "foo",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Broken source file",
|
||||
data: []byte(`[{"Pkg": "pool"}, SourceBranch": "pool", "TargetBranch": "abc"}, {"Pkg": "abc"}]`),
|
||||
expected_err: "invalid character 'S' looking for beginning of value",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea_mock := mock_common.NewMockGitea(ctl)
|
||||
|
||||
gitea_mock.EXPECT().GetRepositoryFileContent("org", "repo", "branch", common.PrjLinksFile).Return(test.data, "", nil)
|
||||
|
||||
res, _, err := ProcessProjectLinks(gitea_mock, "org", "repo", "branch")
|
||||
if len(test.expected_err) > 0 && err == nil {
|
||||
t.Error("Expected an error:", test.expected_err, "; but got nothing")
|
||||
} else if err != nil && len(test.expected_err) == 0 {
|
||||
t.Fatal("Unexpected error:", err)
|
||||
} else if err != nil && !strings.Contains(err.Error(), test.expected_err) {
|
||||
t.Fatal("Expected an error:", test.expected_err, "; but got:", err)
|
||||
}
|
||||
|
||||
if len(res) != len(test.links) {
|
||||
t.Fatal("len of result", len(res), "vs. expected", len(test.links))
|
||||
}
|
||||
|
||||
// verify that returned data is present and in expected order
|
||||
for i := range test.links {
|
||||
if *test.links[i] != *res[i] {
|
||||
t.Error("index", i, "expected", *test.links[i], "received", *res[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
/*
|
||||
func TestLinkUpdater(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
mock_setup func(*mock_common.MockGit, *mock_common.MockGitea)
|
||||
link *OrgLinks
|
||||
}{
|
||||
{
|
||||
name: "no-op update",
|
||||
mock_setup: func(git *mock_common.MockGit, gitea *mock_common.MockGitea) {
|
||||
|
||||
git.EXPECT().GitExecWithOutputOrPanic(gomock.Any(), "rev-list", "upstream/main", "^pkg_hash").Return("")
|
||||
},
|
||||
link: &OrgLinks{
|
||||
Pkg: "pkg",
|
||||
// parentOrg: "rorg",
|
||||
// parentRepo: "rpkg",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
git := mock_common.NewMockGit(ctl)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
test.mock_setup(git, gitea)
|
||||
test.link.StateUpdater(git, gitea)
|
||||
})
|
||||
}
|
||||
}
|
||||
*/
|
556
workflow-direct/main.go
Normal file
@@ -0,0 +1,556 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Autogits. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
const (
|
||||
AppName = "direct_workflow"
|
||||
GitAuthor = "AutoGits prjgit-updater"
|
||||
GitEmail = "adam+autogits-direct@zombino.com"
|
||||
)
|
||||
|
||||
var configuredRepos map[string][]*common.AutogitConfig
|
||||
var gitea common.Gitea
|
||||
|
||||
var orgLinks map[string]*PackageRebaseLink
|
||||
|
||||
func isConfiguredOrg(org *common.Organization) bool {
|
||||
_, found := configuredRepos[org.Username]
|
||||
return found
|
||||
}
|
||||
|
||||
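// concatenateErrors combines two errors, passing nil inputs through; both
// errors stay unwrappable via errors.Is/As since they are wrapped with %w.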
func concatenateErrors(err1, err2 error) error {
|
||||
if err1 == nil {
|
||||
return err2
|
||||
}
|
||||
|
||||
if err2 == nil {
|
||||
return err1
|
||||
}
|
||||
|
||||
return fmt.Errorf("%w\n%w", err1, err2)
|
||||
}
|
||||
|
||||
type RepositoryActionProcessor struct{}
|
||||
|
||||
func (*RepositoryActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
action := request.Data.(*common.RepositoryWebhookEvent)
|
||||
configs, configFound := configuredRepos[action.Organization.Username]
|
||||
|
||||
if !configFound {
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Organization.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if config.GitProjectName == action.Repository.Name {
|
||||
log.Println("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
err = concatenateErrors(err, processConfiguredRepositoryAction(action, config))
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, config *common.AutogitConfig) error {
|
||||
prjgit := config.GitProjectName
|
||||
ghi := common.GitHandlerGeneratorImpl{}
|
||||
git, err := ghi.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
common.PanicOnError(err)
|
||||
if !DebugMode {
|
||||
defer git.Close()
|
||||
}
|
||||
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, action.Organization.Username, prjgit)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s err: %w", prjgit, err)
|
||||
}
|
||||
|
||||
if _, err := fs.Stat(os.DirFS(git.GetPath()), config.GitProjectName); errors.Is(err, os.ErrNotExist) {
|
||||
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", prjGitRepo.SSHURL, prjgit))
|
||||
}
|
||||
|
||||
switch action.Action {
|
||||
case "created":
|
||||
if action.Repository.Object_Format_Name != "sha256" {
|
||||
return fmt.Errorf(" - '%s' repo is not sha256. Ignoring.", action.Repository.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(prjgit, "submodule", "--quiet", "add", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(prjgit, action.Repository.Name), "branch", "--show-current"))
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(prjgit, action.Repository.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(prjgit, action.Repository.Name), "checkout", config.Branch))
|
||||
}
|
||||
common.PanicOnError(git.GitExec(prjgit, "commit", "-m", "Automatic package inclusion via Direct Workflow"))
|
||||
common.PanicOnError(git.GitExec(prjgit, "push"))
|
||||
|
||||
case "deleted":
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), prjgit, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if DebugMode {
|
||||
log.Println("delete event for", action.Repository.Name, "-- not in project. Ignoring")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
common.PanicOnError(git.GitExec(prjgit, "rm", action.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(prjgit, "commit", "-m", "Automatic package removal via Direct Workflow"))
|
||||
common.PanicOnError(git.GitExec(prjgit, "push"))
|
||||
|
||||
default:
|
||||
return fmt.Errorf("%s: %s", "Unknown action type", action.Action)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type PushActionProcessor struct{}
|
||||
|
||||
func (*PushActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
action := request.Data.(*common.PushWebhookEvent)
|
||||
configs, configFound := configuredRepos[action.Repository.Owner.Username]
|
||||
|
||||
if !configFound {
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Repository.Owner.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if config.GitProjectName == action.Repository.Name {
|
||||
log.Println("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
err = concatenateErrors(err, processConfiguredPushAction(action, config))
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredPushAction(action *common.PushWebhookEvent, config *common.AutogitConfig) error {
|
||||
prjgit := config.GitProjectName
|
||||
ghi := common.GitHandlerGeneratorImpl{}
|
||||
git, err := ghi.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
common.PanicOnError(err)
|
||||
if !DebugMode {
|
||||
defer git.Close()
|
||||
}
|
||||
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
log.Println(" + default branch", action.Repository.Default_Branch)
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, action.Repository.Owner.Username, prjgit)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s err: %w", prjgit, err)
|
||||
}
|
||||
|
||||
if _, err := fs.Stat(os.DirFS(git.GetPath()), config.GitProjectName); errors.Is(err, os.ErrNotExist) {
|
||||
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", prjGitRepo.SSHURL, prjgit))
|
||||
}
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), prjgit, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if DebugMode {
|
||||
log.Println("Pushed to package that is not part of the project. Ignoring:", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
common.PanicOnError(git.GitExec(prjgit, "submodule", "update", "--init", "--depth", "1", "--checkout", action.Repository.Name))
|
||||
if err := git.GitExec(filepath.Join(prjgit, action.Repository.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
id, err := git.GitBranchHead(filepath.Join(prjgit, action.Repository.Name), config.Branch)
|
||||
common.PanicOnError(err)
|
||||
for _, commitId := range action.Commits {
|
||||
if commitId.Id == id {
|
||||
common.PanicOnError(git.GitExec(filepath.Join(prjgit, action.Repository.Name), "fetch", "--depth", "1", "origin", id))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(prjgit, action.Repository.Name), "checkout", id))
|
||||
common.PanicOnError(git.GitExec(prjgit, "commit", "-a", "-m", "Automatic update via push via Direct Workflow"))
|
||||
common.PanicOnError(git.GitExec(prjgit, "push"))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
log.Println("push of refs not on the configured branch", config.Branch, ". ignoring.")
|
||||
return nil
|
||||
}
|
||||
|
||||
func verifyProjectState(git common.Git, org string, config *common.AutogitConfig, configs []*common.AutogitConfig) (err error) {
|
||||
defer func() {
|
||||
e := recover()
|
||||
if e != nil {
|
||||
errCast, ok := e.(error)
|
||||
if ok {
|
||||
err = errCast
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
repo, err := gitea.CreateRepositoryIfNotExist(git, org, config.GitProjectName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching or creating '%s/%s' -- aborting verifyProjectState(). Err: %w", org, config.GitProjectName, err)
|
||||
}
|
||||
|
||||
if _, err := fs.Stat(os.DirFS(git.GetPath()), config.GitProjectName); errors.Is(err, os.ErrNotExist) {
|
||||
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", repo.SSHURL, config.GitProjectName))
|
||||
}
|
||||
log.Println(" * Getting submodule list")
|
||||
sub, err := git.GitSubmoduleList(config.GitProjectName, "HEAD")
|
||||
common.PanicOnError(err)
|
||||
|
||||
log.Println(" * Getting package links")
|
||||
var pkgLinks []*PackageRebaseLink
|
||||
if f, err := fs.Stat(os.DirFS(path.Join(git.GetPath(), config.GitProjectName)), common.PrjLinksFile); err == nil && (f.Mode()&fs.ModeType == 0) && f.Size() < 1000000 {
|
||||
if data, err := os.ReadFile(path.Join(git.GetPath(), config.GitProjectName, common.PrjLinksFile)); err == nil {
|
||||
pkgLinks, err = parseProjectLinks(data)
|
||||
if err != nil {
|
||||
log.Println("Cannot parse project links file:", err.Error())
|
||||
pkgLinks = nil
|
||||
} else {
|
||||
ResolveLinks(org, pkgLinks, gitea)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Println(" - No package links defined")
|
||||
}
|
||||
|
||||
/* Check existing submodule that they are updated */
|
||||
|
||||
isGitUpdated := false
|
||||
next_package:
|
||||
for filename, commitId := range sub {
|
||||
// ignore project gits
|
||||
for _, c := range configs {
|
||||
if c.GitProjectName == filename {
|
||||
log.Println(" prjgit as package? ignoring project git:", filename)
|
||||
continue next_package
|
||||
}
|
||||
}
|
||||
|
||||
log.Println(" verifying package:", filename, commitId, config.Branch)
|
||||
commits, err := gitea.GetRecentCommits(org, filename, config.Branch, 10)
|
||||
if err != nil {
|
||||
// assumption that package does not exist, remove from project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
if err := git.GitExec(config.GitProjectName, "rm", filename); err != nil {
|
||||
return fmt.Errorf("Failed to remove deleted submodule. Err: %w", err)
|
||||
}
|
||||
isGitUpdated = true
|
||||
continue
|
||||
}
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("Failed to fetch recent commits for package: '%s'. Err: %w", filename, err)
|
||||
// }
|
||||
|
||||
idx := 1000
|
||||
for i, c := range commits {
|
||||
if c.SHA == commitId {
|
||||
idx = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var link *PackageRebaseLink
|
||||
for _, l := range pkgLinks {
|
||||
if l.Pkg == filename {
|
||||
link = l
|
||||
|
||||
log.Println(" -> linked package")
|
||||
// so, we need to rebase here. Can't really optimize, so clone entire package tree and remote
|
||||
pkgPath := path.Join(config.GitProjectName, filename)
|
||||
git.GitExecOrPanic(config.GitProjectName, "submodule", "update", "--init", "--checkout", filename)
|
||||
git.GitExecOrPanic(pkgPath, "fetch", "origin", commits[0].SHA)
|
||||
git.GitExecOrPanic(pkgPath, "tag", "NOW")
|
||||
git.GitExecOrPanic(pkgPath, "fetch", "origin")
|
||||
git.GitExecOrPanic(pkgPath, "remote", "add", "parent", link.parentRepo.SSHURL)
|
||||
git.GitExecOrPanic(pkgPath, "fetch", "parent")
|
||||
git.GitExecOrPanic(pkgPath, "rebase", "--onto", "parent", link.SourceBranch)
|
||||
|
||||
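// The "NOW" tag marks the pre-rebase head; rev-list ^NOW HEAD counts commits
// not reachable from it, so the force-push below only happens when the rebase
// actually changed something.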
nCommits := len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgPath, "rev-list", "^NOW", "HEAD"), "\n"))
|
||||
if nCommits > 0 {
|
||||
git.GitExecOrPanic(pkgPath, "push", "-f", "origin", "HEAD:"+config.Branch)
|
||||
isGitUpdated = true
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if link == nil {
|
||||
if idx == 0 {
|
||||
// up-to-date
|
||||
continue
|
||||
} else if idx < len(commits) { // update
|
||||
common.PanicOnError(git.GitExec(config.GitProjectName, "submodule", "update", "--init", "--depth", "1", "--checkout", filename))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(config.GitProjectName, filename), "fetch", "--depth", "1", "origin", commits[0].SHA))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(config.GitProjectName, filename), "checkout", commits[0].SHA))
|
||||
isGitUpdated = true
|
||||
} else {
|
||||
// probably need `merge-base` or `rev-list` here instead, or the project updated already
|
||||
return fmt.Errorf("Cannot find SHA of last matching update for package: '%s'. idx: %d", filename, idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// find all missing repositories, and add them
|
||||
if DebugMode {
|
||||
log.Println("checking for missing repositories...")
|
||||
}
|
||||
repos, err := gitea.GetOrganizationRepositories(org)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
log.Println(" nRepos:", len(repos))
|
||||
}
|
||||
|
||||
/* Check repositories in org to make sure they are included in project git */
|
||||
next_repo:
|
||||
for _, r := range repos {
|
||||
if DebugMode {
|
||||
log.Println(" -- checking", r.Name)
|
||||
}
|
||||
|
||||
if r.ObjectFormatName != "sha256" {
|
||||
if DebugMode {
|
||||
log.Println(" + ", r.ObjectFormatName, ". Needs to be sha256. Ignoring")
|
||||
}
|
||||
continue next_repo
|
||||
}
|
||||
|
||||
for _, c := range configs {
|
||||
if c.Organization == org && c.GitProjectName == r.Name {
|
||||
// ignore project gits
|
||||
continue next_repo
|
||||
}
|
||||
}
|
||||
|
||||
for repo := range sub {
|
||||
if repo == r.Name {
|
||||
// not missing
|
||||
continue next_repo
|
||||
}
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
log.Println(" -- checking repository:", r.Name)
|
||||
}
|
||||
|
||||
if _, err := gitea.GetRecentCommits(org, r.Name, config.Branch, 1); err != nil {
|
||||
// assumption that package does not exist, so not part of project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
continue
|
||||
}
|
||||
|
||||
// add repository to git project
|
||||
common.PanicOnError(git.GitExec(config.GitProjectName, "submodule", "--quiet", "add", "--depth", "1", r.CloneURL, r.Name))
|
||||
|
||||
if len(config.Branch) > 0 {
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(config.GitProjectName, r.Name), "branch", "--show-current"))
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(config.GitProjectName, r.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("Fetching branch %s for %s/%s failed. Ignoring.", config.Branch, repo.Owner.UserName, r.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(config.GitProjectName, r.Name), "checkout", config.Branch))
|
||||
}
|
||||
}
|
||||
|
||||
isGitUpdated = true
|
||||
}
|
||||
|
||||
if isGitUpdated {
|
||||
common.PanicOnError(git.GitExec(config.GitProjectName, "commit", "-a", "-m", "Automatic update via push via Direct Workflow -- SYNC"))
|
||||
common.PanicOnError(git.GitExec(config.GitProjectName, "push"))
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
log.Println("Verification finished for ", org, ", config", config.GitProjectName)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var checkOnStart bool
|
||||
var checkInterval time.Duration
|
||||
|
||||
func checkRepos() {
|
||||
for org, configs := range configuredRepos {
|
||||
for _, config := range configs {
|
||||
if checkInterval > 0 {
|
||||
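// Jitter: sleep a uniformly random duration in roughly [checkInterval/2, 1.5*checkInterval)
// so periodic checks for different projects spread out over time.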
sleepInterval := checkInterval - checkInterval/2 + time.Duration(rand.Int63n(int64(checkInterval)))
|
||||
log.Println(" - sleep interval", sleepInterval, "until next check")
|
||||
time.Sleep(sleepInterval)
|
||||
}
|
||||
|
||||
log.Printf(" ++ starting verification, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
ghi := common.GitHandlerGeneratorImpl{}
|
||||
git, err := ghi.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
log.Println("Faield to allocate GitHandler:", err)
|
||||
return
|
||||
}
|
||||
if err := verifyProjectState(git, org, config, configs); err != nil {
|
||||
log.Printf(" *** verification failed, org: `%s`, err: %#v\n", org, err)
|
||||
}
|
||||
log.Printf(" ++ verification complete, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func consistencyCheckProcess() {
|
||||
if checkOnStart {
|
||||
savedCheckInterval := checkInterval
|
||||
checkInterval = 0
|
||||
log.Println("== Startup consistency check begin...")
|
||||
checkRepos()
|
||||
log.Println("== Startup consistency check done...")
|
||||
checkInterval = savedCheckInterval
|
||||
}
|
||||
|
||||
for {
|
||||
checkRepos()
|
||||
}
|
||||
}
|
||||
|
||||
var DebugMode bool
|
||||
|
||||
func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
configFile, err := common.ReadConfigFile(configFilename)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
configs, _ := common.ResolveWorkflowConfigs(gitea, configFile)
|
||||
configuredRepos = make(map[string][]*common.AutogitConfig)
|
||||
*orgs = make([]string, 0, 1)
|
||||
for _, c := range configs {
|
||||
if slices.Contains(c.Workflows, "direct") {
|
||||
if DebugMode {
|
||||
log.Printf(" + adding org: '%s', branch: '%s', prjgit: '%s'\n", c.Organization, c.Branch, c.GitProjectName)
|
||||
}
|
||||
configs := configuredRepos[c.Organization]
|
||||
if configs == nil {
|
||||
configs = make([]*common.AutogitConfig, 0, 1)
|
||||
}
|
||||
configs = append(configs, c)
|
||||
configuredRepos[c.Organization] = configs
|
||||
|
||||
*orgs = append(*orgs, c.Organization)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
configFilename := flag.String("config", "", "List of PrjGit")
|
||||
giteaHost := flag.String("gitea", "src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
flag.BoolVar(&DebugMode, "debug", false, "Extra debugging information")
|
||||
flag.BoolVar(&checkOnStart, "check-on-start", false, "Check all repositories for consistency on start, without delays")
|
||||
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consistency, in hours")
|
||||
flag.Parse()
|
||||
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
var defs common.ListenDefinitions
|
||||
|
||||
// handle reconfiguration
|
||||
signalChannel := make(chan os.Signal, 1)
|
||||
defer close(signalChannel)
|
||||
go func() {
|
||||
for {
|
||||
sig, ok := <-signalChannel
|
||||
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
if sig != syscall.SIGHUP {
|
||||
log.Println("Unexpected signal received:", sig)
|
||||
continue
|
||||
}
|
||||
log.Println("*** Reconfiguring ***")
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
defs.UpdateTopics()
|
||||
}
|
||||
}()
|
||||
signal.Notify(signalChannel, syscall.SIGHUP)
|
||||
|
||||
checkInterval = time.Duration(*checkIntervalHours) * time.Hour
|
||||
|
||||
gitea = common.AllocateGiteaTransport(*giteaHost)
|
||||
CurrentUser, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
log.Fatalln("Cannot fetch current user:", err)
|
||||
}
|
||||
log.Println("Current User:", CurrentUser.UserName)
|
||||
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
|
||||
defs.GitAuthor = GitAuthor
|
||||
defs.RabbitURL, err = url.Parse(*rabbitUrl)
|
||||
if err != nil {
|
||||
log.Panicf("cannot parse server URL. Err: %#v\n", err)
|
||||
}
|
||||
|
||||
go consistencyCheckProcess()
|
||||
log.Println("defs:", defs)
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
|
||||
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
|
||||
|
||||
log.Fatal(defs.ProcessRabbitMQEvents())
|
||||
}
|
1
workflow-pr/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
workflow-pr
|
63
workflow-pr/README.md
Normal file
@@ -0,0 +1,63 @@
|
||||
Workflow-PR bot
|
||||
===============
|
||||
|
||||
Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
|
||||
|
||||
Areas of Responsibility
|
||||
-----------------------
|
||||
|
||||
* Detects a PackageGit PR creation against a package and creates a corresponding PR against the ProjectGit
|
||||
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated
|
||||
* Stores reference to the PackageGit PR in the headers of the ProjectGit PR comments, for later reference
|
||||
* this allows the ProjectGit PR to be merged separately later (via another tool, for example)
|
||||
* Initiates all staging workflows via review requests
|
||||
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Any project (devel, etc) that accepts PR
|
||||
|
||||
|
||||
Config file
|
||||
-----------
|
||||
JSON
|
||||
* _Workflows_: "pr" -- pr workflow enabled
|
||||
* _Organization_: organization that holds all the packages
|
||||
* _Branch_: branch updated in repo's
|
||||
* _GitProjectName_: package in above org, or `org/package` for PrjGit
|
||||
* _Reviewers_: accounts associated with mandatory reviews for PrjGit. Can trigger additional
|
||||
review requests for PrjGit or associated PkgGit repos. Only when all reviews are
|
||||
satisfied, will the PrjGit PR be merged.
|
||||
|
||||
example:
|
||||
|
||||
[
|
||||
{
|
||||
"Workflows": ["pr", "direct"],
|
||||
"Organization": "autogits",
|
||||
"GitProjectName": "HiddenPrj",
|
||||
"Branch": "hidden",
|
||||
"Reviewers": []
|
||||
},
|
||||
...
|
||||
]
|
||||
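The entries above map onto the bot's configuration type in `bots-common` (`common.AutogitConfig`). As a rough sketch — the struct, field names and file name below are assumptions inferred from how the bots use these fields in this change, not the actual definition — loading the file and selecting the entries that enable the `pr` workflow might look like:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
	"slices"
)

// Sketch only: the real type is common.AutogitConfig in bots-common.
type autogitConfig struct {
	Workflows      []string
	Organization   string
	GitProjectName string
	Branch         string
	Reviewers      []string
}

func main() {
	data, err := os.ReadFile("workflow.config.json") // hypothetical filename
	if err != nil {
		log.Fatal(err)
	}

	var configs []autogitConfig
	if err := json.Unmarshal(data, &configs); err != nil {
		log.Fatal(err)
	}

	// Only entries that list "pr" in Workflows are handled by this bot.
	for _, c := range configs {
		if slices.Contains(c.Workflows, "pr") {
			fmt.Printf("pr workflow: %s/%s (branch %q, %d reviewers)\n",
				c.Organization, c.GitProjectName, c.Branch, len(c.Reviewers))
		}
	}
}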
|
||||
|
||||
Maintainership
|
||||
--------------
|
||||
|
||||
Maintainership information is defined per project. For reviews, package maintainers are coalesced
|
||||
with project maintainers. A review by any of the maintainers is acceptable.
|
||||
|
||||
example:
|
||||
|
||||
{
|
||||
"package1": [ "reviewer", "reviewer2"],
|
||||
"package2": [],
|
||||
|
||||
// "project" maintainer
|
||||
"": ["reviewer3", "reviewer4"]
|
||||
}
|
||||
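To make the coalescing rule concrete, here is a minimal sketch; the function names are illustrative only, not the bot's API:

package main

import "fmt"

// Package maintainers are coalesced with the project maintainers stored
// under the "" key; a review from any of them satisfies the maintainer review.
func maintainersFor(m map[string][]string, pkg string) []string {
	reviewers := append([]string{}, m[pkg]...)
	return append(reviewers, m[""]...) // project maintainers always included
}

func reviewAccepted(m map[string][]string, pkg, reviewer string) bool {
	for _, r := range maintainersFor(m, pkg) {
		if r == reviewer {
			return true
		}
	}
	return false
}

func main() {
	maintainers := map[string][]string{
		"package1": {"reviewer", "reviewer2"},
		"package2": {},
		"":         {"reviewer3", "reviewer4"}, // project maintainers
	}

	fmt.Println(reviewAccepted(maintainers, "package2", "reviewer3")) // true: project maintainer
	fmt.Println(reviewAccepted(maintainers, "package1", "nobody"))    // false
}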
|
@@ -6,9 +6,14 @@
|
||||
},
|
||||
{
|
||||
"Workflows": ["direct"],
|
||||
"Organization": "autogits",
|
||||
"Organization": "autogits",
|
||||
"GitProjectName": "HiddenPrj",
|
||||
"Branch": "hidden"
|
||||
},
|
||||
{
|
||||
"Workflows": ["pr", "direct"],
|
||||
"Organization": "importtest",
|
||||
"Branch": "factory"
|
||||
}
|
||||
]
|
||||
|
@@ -1,10 +1,13 @@
|
||||
module src.opensuse.org/pr-review
|
||||
module src.opensuse.org/workflow-pr
|
||||
|
||||
go 1.22.3
|
||||
go 1.23.2
|
||||
|
||||
replace src.opensuse.org/autogits/common => ../bots-common
|
||||
|
||||
require src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
|
||||
require (
|
||||
go.uber.org/mock v0.5.0
|
||||
src.opensuse.org/autogits/common v0.0.0-00010101000000-000000000000
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
@@ -26,6 +29,7 @@ require (
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/opentracing/opentracing-go v1.2.0 // indirect
|
||||
github.com/rabbitmq/amqp091-go v1.10.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.14.0 // indirect
|
||||
go.opentelemetry.io/otel v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.24.0 // indirect
|
@@ -48,6 +48,8 @@ github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+
|
||||
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
|
||||
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
|
||||
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
|
||||
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
@@ -64,6 +66,10 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg
|
||||
go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
|
||||
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
147
workflow-pr/main.go
Normal file
@@ -0,0 +1,147 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Autogits. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"log"
|
||||
"net/url"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
const (
|
||||
AppName = "workflow-pr"
|
||||
GitAuthor = "AutoGits - pr-review"
|
||||
GitEmail = "adam+autogits-pr@zombino.com"
|
||||
)
|
||||
|
||||
/*
|
||||
func fetchPrGit(h *common.RequestHandler, pr *models.PullRequest) error {
|
||||
// clone PR head and base and return path
|
||||
if h.HasError() {
|
||||
return h.Error
|
||||
}
|
||||
if _, err := os.Stat(path.Join(h.GitPath, pr.Head.Sha)); os.IsNotExist(err) {
|
||||
h.GitExec("", "clone", "--depth", "1", pr.Head.Repo.CloneURL, pr.Head.Sha)
|
||||
h.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.Base.Sha)
|
||||
} else if err != nil {
|
||||
h.Error = err
|
||||
}
|
||||
|
||||
return h.Error
|
||||
}*/
|
||||
|
||||
var DebugMode bool
|
||||
var ListPROnly bool
|
||||
var PRID int64
|
||||
var CurrentUser *models.User
|
||||
|
||||
func main() {
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
workflowConfig := flag.String("config", "", "Repository and workflow definition file")
|
||||
giteaHost := flag.String("gitea", "src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
flag.BoolVar(&DebugMode, "debug", false, "Extra debugging information")
|
||||
checkOnStart := flag.Bool("check-on-start", false, "Check all repositories for consistency on start, without delays")
|
||||
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consistency, in hours")
|
||||
flag.BoolVar(&ListPROnly, "list-prs-only", false, "Only lists PRs without acting on them")
|
||||
flag.Int64Var(&PRID, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if len(*workflowConfig) == 0 {
|
||||
log.Fatalln("No configuratio file specified. Aborting")
|
||||
}
|
||||
|
||||
gitea := common.AllocateGiteaTransport(*giteaHost)
|
||||
config, err := common.ReadConfigFile(*workflowConfig)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
configs, err := common.ResolveWorkflowConfigs(gitea, config)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
req := new(RequestProcessor)
|
||||
|
||||
req.configuredRepos = make(map[string][]*common.AutogitConfig)
|
||||
req.git = &common.GitHandlerGeneratorImpl{}
|
||||
orgs := make([]string, 0, 1)
|
||||
for _, c := range configs {
|
||||
if slices.Contains(c.Workflows, "pr") {
|
||||
if DebugMode {
|
||||
log.Printf(" + adding org: '%s', branch: '%s', prjgit: '%s'\n", c.Organization, c.Branch, c.GitProjectName)
|
||||
}
|
||||
configs := req.configuredRepos[c.Organization]
|
||||
if configs == nil {
|
||||
configs = make([]*common.AutogitConfig, 0, 1)
|
||||
}
|
||||
configs = append(configs, c)
|
||||
req.configuredRepos[c.Organization] = configs
|
||||
|
||||
orgs = append(orgs, c.Organization)
|
||||
}
|
||||
}
|
||||
|
||||
if CurrentUser, err = gitea.GetCurrentUser(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
log.Println("Running with token from", CurrentUser.UserName)
|
||||
|
||||
req.Synced = &PullRequestSynced{
|
||||
gitea: gitea,
|
||||
}
|
||||
req.Opened = &PullRequestOpened{
|
||||
gitea: gitea,
|
||||
}
|
||||
req.Closed = &PullRequestClosed{
|
||||
gitea: gitea,
|
||||
}
|
||||
req.Review = &PullRequestReviewed{
|
||||
gitea: gitea,
|
||||
}
|
||||
|
||||
checker := CreateDefaultStateChecker(*checkOnStart, req, gitea, time.Duration(*checkIntervalHours)*time.Hour)
|
||||
go checker.ConsistencyCheckProcess()
|
||||
|
||||
var defs common.ListenDefinitions
|
||||
|
||||
defs.GitAuthor = GitAuthor
|
||||
defs.RabbitURL, _ = url.Parse(*rabbitUrl)
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_PR] = req
|
||||
defs.Handlers[common.RequestType_PRSync] = req
|
||||
defs.Handlers[common.RequestType_PRReviewAccepted] = req
|
||||
defs.Handlers[common.RequestType_PRReviewRejected] = req
|
||||
|
||||
log.Fatal(defs.ProcessRabbitMQEvents())
|
||||
}
|
204
workflow-pr/main_test.go
Normal file
@@ -0,0 +1,204 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
// "go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
// "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestProjectBranchName(t *testing.T) {
|
||||
req := common.PullRequestWebhookEvent{
|
||||
Repository: &common.Repository{
|
||||
Name: "testingRepo",
|
||||
},
|
||||
Pull_Request: &common.PullRequest{
|
||||
Number: 10,
|
||||
},
|
||||
}
|
||||
|
||||
branchName := prGitBranchNameForPR(&req)
|
||||
if branchName != "PR_testingRepo#10" {
|
||||
t.Error("Unexpected branch name:", branchName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestProjectGitSync(t *testing.T) {
|
||||
req := common.PullRequestWebhookEvent{
|
||||
Action: "pull",
|
||||
Number: 0,
|
||||
}
|
||||
|
||||
if err := processPrjGitPullRequestSync(&req); err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
const LocalCMD = "---"
|
||||
|
||||
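// gitExecs replays a scripted list of git commands against the test repositories.
// Entries whose first element is LocalCMD instead run an arbitrary local command
// inside the named subdirectory of git.GitPath.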
func gitExecs(t *testing.T, git *common.GitHandlerImpl, cmds [][]string) {
|
||||
for _, cmd := range cmds {
|
||||
if cmd[0] == LocalCMD {
|
||||
command := exec.Command(cmd[2], cmd[3:]...)
|
||||
command.Dir = filepath.Join(git.GitPath, cmd[1])
|
||||
command.Stdin = nil
|
||||
command.Env = append([]string{"GIT_CONFIG_COUNT=1", "GIT_CONFIG_KEY_0=protocol.file.allow", "GIT_CONFIG_VALUE_0=always"}, common.ExtraGitParams...)
|
||||
_, err := command.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Errorf(" *** error: %v\n", err)
|
||||
}
|
||||
} else {
|
||||
git.GitExecOrPanic(cmd[0], cmd[1:]...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
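// commandsForPackages builds the command script that creates one package git per
// index in [startN, endN], commits a test file into each and registers it as a
// submodule of the prj repository under dir.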
func commandsForPackages(dir, prefix string, startN, endN int) [][]string {
|
||||
commands := make([][]string, (endN-startN+1)*6+1)
|
||||
|
||||
if dir == "" {
|
||||
dir = "."
|
||||
}
|
||||
cmdIdx := 0
|
||||
for idx := startN; idx <= endN; idx++ {
|
||||
pkgDir := fmt.Sprintf("%s%d", prefix, idx)
|
||||
|
||||
commands[cmdIdx+0] = []string{"", "init", "-q", "--object-format", "sha256", "-b", "testing", pkgDir}
|
||||
commands[cmdIdx+1] = []string{LocalCMD, pkgDir, "/usr/bin/touch", "testFile"}
|
||||
commands[cmdIdx+2] = []string{pkgDir, "add", "testFile"}
|
||||
commands[cmdIdx+3] = []string{pkgDir, "commit", "-m", "added testFile"}
|
||||
commands[cmdIdx+4] = []string{pkgDir, "config", "receive.denyCurrentBranch", "ignore"}
|
||||
commands[cmdIdx+5] = []string{"prj", "submodule", "add", filepath.Join("..", pkgDir), filepath.Join(dir, pkgDir)}
|
||||
|
||||
cmdIdx += 6
|
||||
}
|
||||
|
||||
// add all the submodules to the prj
|
||||
commands[cmdIdx+0] = []string{"prj", "commit", "-a", "-m", "adding subpackages"}
|
||||
|
||||
return commands
|
||||
}
|
||||
|
||||
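// setupGitForTests creates the prj and foo fixture repositories with deterministic
// author/committer metadata and registers foo as the testRepo submodule of prj.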
func setupGitForTests(t *testing.T, git *common.GitHandlerImpl) {
|
||||
common.ExtraGitParams = []string{
|
||||
"GIT_CONFIG_COUNT=1",
|
||||
"GIT_CONFIG_KEY_0=protocol.file.allow",
|
||||
"GIT_CONFIG_VALUE_0=always",
|
||||
|
||||
"GIT_AUTHOR_NAME=testname",
|
||||
"GIT_AUTHOR_EMAIL=test@suse.com",
|
||||
"GIT_AUTHOR_DATE='2005-04-07T22:13:13'",
|
||||
"GIT_COMMITTER_NAME=testname",
|
||||
"GIT_COMMITTER_EMAIL=test@suse.com",
|
||||
"GIT_COMMITTER_DATE='2005-04-07T22:13:13'",
|
||||
}
|
||||
|
||||
gitExecs(t, git, [][]string{
|
||||
{"", "init", "-q", "--object-format", "sha256", "-b", "testing", "prj"},
|
||||
{"", "init", "-q", "--object-format", "sha256", "-b", "testing", "foo"},
|
||||
{LocalCMD, "foo", "/usr/bin/touch", "file1"},
|
||||
{"foo", "add", "file1"},
|
||||
{"foo", "commit", "-m", "first commit"},
|
||||
{"prj", "config", "receive.denyCurrentBranch", "ignore"},
|
||||
{"prj", "submodule", "init"},
|
||||
{"prj", "submodule", "add", "../foo", "testRepo"},
|
||||
{"prj", "add", ".gitmodules", "testRepo"},
|
||||
{"prj", "commit", "-m", "First instance"},
|
||||
{"prj", "submodule", "deinit", "testRepo"},
|
||||
{LocalCMD, "foo", "/usr/bin/touch", "file2"},
|
||||
{"foo", "add", "file2"},
|
||||
{"foo", "commit", "-m", "added file2"},
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpdatePrBranch(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
origLogger := log.Writer()
|
||||
log.SetOutput(&buf)
|
||||
defer log.SetOutput(origLogger)
|
||||
|
||||
req := &common.PullRequestWebhookEvent{
|
||||
Repository: &common.Repository{
|
||||
Name: "testRepo",
|
||||
},
|
||||
Pull_Request: &common.PullRequest{},
|
||||
}
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
DebugLogger: true,
|
||||
GitCommiter: "TestCommiter",
|
||||
GitEmail: "test@testing",
|
||||
GitPath: t.TempDir(),
|
||||
}
|
||||
|
||||
setupGitForTests(t, git)
|
||||
gitExecs(t, git, [][]string{{"", "clone", "prj", common.DefaultGitPrj}})
|
||||
|
||||
revs := strings.Split(git.GitExecWithOutputOrPanic("foo", "rev-list", "HEAD"), "\n")
|
||||
req.Pull_Request.Base.Sha = strings.TrimSpace(revs[1])
|
||||
req.Pull_Request.Head.Sha = strings.TrimSpace(revs[0])
|
||||
|
||||
updateSubmoduleInPR(req, git)
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", "created commit"))
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", "origin", "+HEAD:+testing"))
|
||||
git.GitExecOrPanic("prj", "reset", "--hard", "testing")
|
||||
rev := strings.TrimSpace(git.GitExecWithOutputOrPanic(filepath.Join(common.DefaultGitPrj, "testRepo"), "rev-list", "-1", "HEAD"))
|
||||
if rev != req.Pull_Request.Head.Sha {
|
||||
t.Error("prj/testRepo not updated to", req.Pull_Request.Head.Sha, "but is at", rev)
|
||||
t.Error(buf.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreatePrBranch(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
origLogger := log.Writer()
|
||||
log.SetOutput(&buf)
|
||||
defer log.SetOutput(origLogger)
|
||||
|
||||
req := &common.PullRequestWebhookEvent{
|
||||
Repository: &common.Repository{
|
||||
Name: "testRepo",
|
||||
},
|
||||
Pull_Request: &common.PullRequest{},
|
||||
}
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
DebugLogger: true,
|
||||
GitCommiter: "TestCommiter",
|
||||
GitEmail: "test@testing",
|
||||
GitPath: t.TempDir(),
|
||||
}
|
||||
|
||||
setupGitForTests(t, git)
|
||||
gitExecs(t, git, [][]string{{"", "clone", "prj", common.DefaultGitPrj}})
|
||||
|
||||
revs := strings.Split(git.GitExecWithOutputOrPanic("foo", "rev-list", "HEAD"), "\n")
|
||||
req.Pull_Request.Base.Sha = strings.TrimSpace(revs[1])
|
||||
req.Pull_Request.Head.Sha = strings.TrimSpace(revs[0])
|
||||
|
||||
updateSubmoduleInPR(req, git)
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", "created commit"))
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", "origin", "+HEAD:testingCreated"))
|
||||
|
||||
rev := strings.TrimSpace(git.GitExecWithOutputOrPanic(filepath.Join(common.DefaultGitPrj, "testRepo"), "rev-list", "-1", "HEAD"))
|
||||
if rev != req.Pull_Request.Head.Sha {
|
||||
t.Error("prj/testRepo not updated to", req.Pull_Request.Head.Sha, "but is at", rev)
|
||||
t.Error(buf.String())
|
||||
}
|
||||
|
||||
os.CopyFS("/tmp/test", os.DirFS(git.GitPath))
|
||||
git.GitExecOrPanic("prj", "reset", "--hard", "testingCreated")
|
||||
rev = strings.TrimSpace(git.GitExecWithOutputOrPanic("prj", "submodule", "status", "testRepo"))[1 : len(req.Pull_Request.Head.Sha)+1]
|
||||
if rev != req.Pull_Request.Head.Sha {
|
||||
t.Error("prj/testRepo not updated to", req.Pull_Request.Head.Sha, "but is at", rev)
|
||||
t.Error(buf.String())
|
||||
}
|
||||
}
|
67
workflow-pr/pr_processor.go
Normal file
@@ -0,0 +1,67 @@
|
||||
package main
|
||||
|
||||
//go:generate mockgen -source=pr_processor.go -destination=mock/pr_processor.go -typed
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type PullRequestProcessor interface {
|
||||
Process(req *common.PullRequestWebhookEvent, git common.Git, config *common.AutogitConfig) error
|
||||
}
|
||||
|
||||
type RequestProcessor struct {
|
||||
Opened, Synced, Closed, Review PullRequestProcessor
|
||||
|
||||
configuredRepos map[string][]*common.AutogitConfig
|
||||
git common.GitHandlerGenerator
|
||||
}
|
||||
|
||||
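// ProcessFunc routes an incoming pull request webhook event to the processor that
// matches its action, after looking up the AutogitConfig for the event's
// organization and branch and allocating a git handler for the work.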
func (w *RequestProcessor) ProcessFunc(request *common.Request) error {
|
||||
req, ok := request.Data.(*common.PullRequestWebhookEvent)
|
||||
if !ok {
|
||||
return fmt.Errorf("*** Invalid data format for PR processing.")
|
||||
}
|
||||
|
||||
configs := w.configuredRepos[req.Repository.Owner.Username]
|
||||
if len(configs) < 1 {
|
||||
// ignoring pull request against unconfigured project (could be just regular sources?)
|
||||
return nil
|
||||
}
|
||||
|
||||
var config *common.AutogitConfig
|
||||
for _, c := range configs {
|
||||
if c.GitProjectName == req.Pull_Request.Base.Repo.Name ||
|
||||
c.Branch == req.Pull_Request.Base.Ref {
|
||||
|
||||
config = c
|
||||
break
|
||||
}
|
||||
}
|
||||
if config == nil {
|
||||
return fmt.Errorf("Cannot find config for branch '%s'", req.Pull_Request.Base.Ref)
|
||||
}
|
||||
|
||||
git, err := w.git.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error allocating GitHandler. Err: %w", err)
|
||||
}
|
||||
|
||||
switch req.Action {
|
||||
case "opened", "reopened":
|
||||
return w.Opened.Process(req, git, config)
|
||||
case "synchronized":
|
||||
return w.Synced.Process(req, git, config)
|
||||
case "edited":
|
||||
// does not need to be handled
|
||||
return nil
|
||||
case "closed":
|
||||
return w.Closed.Process(req, git, config)
|
||||
case "reviewed":
|
||||
return w.Review.Process(req, git, config)
|
||||
}
|
||||
|
||||
return fmt.Errorf("Unhandled pull request action: %s", req.Action)
|
||||
}
|
38
workflow-pr/pr_processor_closed.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type PullRequestClosed struct {
|
||||
gitea common.Gitea
|
||||
}
|
||||
|
||||
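// Process currently only logs the state of a closed project-git pull request;
// close events for package pull requests are ignored.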
func (*PullRequestClosed) Process(req *common.PullRequestWebhookEvent, git common.Git, config *common.AutogitConfig) error {
|
||||
if req.Repository.Name != config.GitProjectName {
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Println("request was:", req.Pull_Request.State)
|
||||
|
||||
return nil
|
||||
/*
|
||||
req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
if req.Repository.Name != common.DefaultGitPrj {
|
||||
// we only handle project git PR updates here
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := fetchPrGit(h, req.Pull_Request); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
headSubmodules := h.GitSubmoduleList(dir, pr.Head.Sha)
|
||||
baseSubmodules := h.GitSubmoduleList(dir, pr.Base.Sha)
|
||||
return nil
|
||||
*/
|
||||
}
|
||||
|
84
workflow-pr/pr_processor_closed_test.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestClosePR(t *testing.T) {
|
||||
pr := PullRequestClosed{}
|
||||
|
||||
config := &common.AutogitConfig{
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
Branch: "branch",
|
||||
Organization: "test",
|
||||
GitProjectName: "prj",
|
||||
}
|
||||
|
||||
event := &common.PullRequestWebhookEvent{
|
||||
Action: "closed",
|
||||
Number: 1,
|
||||
Pull_Request: &common.PullRequest{
|
||||
Id: 1,
|
||||
Base: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
Head: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
},
|
||||
Repository: &common.Repository{
|
||||
Owner: &common.Organization{
|
||||
Username: "test",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
GitCommiter: "tester",
|
||||
GitEmail: "test@suse.com",
|
||||
}
|
||||
|
||||
t.Run("PR git closed request against PrjGit == no action", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr.gitea = mock_common.NewMockGitea(ctl)
|
||||
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "testRepo"
|
||||
event.Repository.Name = "testRepo"
|
||||
|
||||
if err := pr.Process(event, git, config); err != nil {
|
||||
t.Error("Error PrjGit closed request. Should be no error.", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("PR git closed", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr.gitea = mock_common.NewMockGitea(ctl)
|
||||
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "prjGit"
|
||||
event.Repository.Name = "tester"
|
||||
|
||||
if err := pr.Process(event, git, config); err != nil {
|
||||
t.Error("Error PrjGit closed request. Should be no error.", err)
|
||||
}
|
||||
})
|
||||
|
||||
}
|
||||
|
83
workflow-pr/pr_processor_opened.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type PullRequestOpened struct {
|
||||
gitea common.Gitea
|
||||
}
|
||||
|
||||
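// Process reacts to a newly opened package pull request: it creates or updates the
// corresponding PrjGit branch, pins the package submodule to the PR head, opens the
// forwarded PrjGit pull request if needed and assigns reviewers based on the
// project's maintainership data.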
func (o *PullRequestOpened) Process(req *common.PullRequestWebhookEvent, git common.Git, config *common.AutogitConfig) error {
|
||||
// requests against project are not handled here
|
||||
if req.Repository.Name == config.GitProjectName {
|
||||
return nil
|
||||
}
|
||||
|
||||
// create the PrjGit branch for building the pull request
|
||||
branchName := prGitBranchNameForPR(req)
|
||||
commitMsg := fmt.Sprintf(`auto-created for %s
|
||||
|
||||
This commit was autocreated by %s
|
||||
referencing
|
||||
|
||||
`+common.PrPattern,
|
||||
req.Repository.Owner.Username,
|
||||
req.Repository.Name,
|
||||
GitAuthor,
|
||||
req.Repository.Name,
|
||||
req.Pull_Request.Number,
|
||||
)
|
||||
|
||||
// TODO: fix this for config.Organization
|
||||
prjGit, err := o.gitea.CreateRepositoryIfNotExist(git, config.Organization, config.GitProjectName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", prjGit.SSHURL, common.DefaultGitPrj))
|
||||
err = git.GitExec(common.DefaultGitPrj, "fetch", "origin", branchName+":"+branchName)
|
||||
if err != nil {
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "checkout", "-B", branchName, prjGit.DefaultBranch))
|
||||
} else {
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "checkout", branchName))
|
||||
}
|
||||
subList, err := git.GitSubmoduleList(common.DefaultGitPrj, "HEAD")
|
||||
common.PanicOnError(err)
|
||||
|
||||
if id := subList[req.Repository.Name]; id != req.Pull_Request.Head.Sha {
|
||||
updateSubmoduleInPR(req, git)
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", commitMsg))
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", "origin", "+HEAD:"+branchName))
|
||||
}
|
||||
|
||||
PR, err := o.gitea.CreatePullRequestIfNotExist(prjGit, branchName, prjGit.DefaultBranch,
|
||||
fmt.Sprintf("Forwarded PR: %s", req.Repository.Name),
|
||||
fmt.Sprintf(`This is a forwarded pull request by %s
|
||||
referencing the following pull request:
|
||||
|
||||
`+common.PrPattern,
|
||||
GitAuthor, req.Repository.Owner.Username, req.Repository.Name, req.Pull_Request.Number),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
prset, err := common.FetchPRSet(o.gitea, req.Repository.Owner.Username, req.Repository.Name, req.Number, config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// request build review
|
||||
log.Println("num of current reviewers:", len(PR.RequestedReviewers))
|
||||
maintainers, err := common.FetchProjectMaintainershipData(o.gitea, config.Organization, config.GitProjectName, config.Branch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return prset.AssignReviewers(o.gitea, maintainers)
|
||||
}
|
207
workflow-pr/pr_processor_opened_test.go
Normal file
@@ -0,0 +1,207 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestOpenPR(t *testing.T) {
|
||||
pr := PullRequestOpened{}
|
||||
|
||||
config := &common.AutogitConfig{
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
Branch: "branch",
|
||||
Organization: "test",
|
||||
GitProjectName: "prj",
|
||||
}
|
||||
|
||||
event := &common.PullRequestWebhookEvent{
|
||||
Action: "opened",
|
||||
Number: 1,
|
||||
Pull_Request: &common.PullRequest{
|
||||
Id: 1,
|
||||
Base: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
Owner: &common.Organization{
|
||||
Username: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
Head: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
},
|
||||
Repository: &common.Repository{
|
||||
Owner: &common.Organization{
|
||||
Username: "test",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
GitCommiter: "tester",
|
||||
GitEmail: "test@suse.com",
|
||||
}
|
||||
|
||||
t.Run("PR git opened request against PrjGit == no action", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr.gitea = mock_common.NewMockGitea(ctl)
|
||||
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "testRepo"
|
||||
event.Repository.Name = "testRepo"
|
||||
|
||||
if err := pr.Process(event, git, config); err != nil {
|
||||
t.Error("Error PrjGit opened request. Should be no error.", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Open PrjGit PR", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = gitea
|
||||
|
||||
event.Repository.Name = "testRepo"
|
||||
config.GitProjectName = "prjcopy"
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
prjgit := &models.Repository{
|
||||
SSHURL: "./prj",
|
||||
DefaultBranch: "testing",
|
||||
}
|
||||
giteaPR := &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Repo: &models.Repository{
|
||||
Owner: &models.User{
|
||||
UserName: "test",
|
||||
},
|
||||
Name: "testRepo",
|
||||
},
|
||||
},
|
||||
User: &models.User{
|
||||
UserName: "test",
|
||||
},
|
||||
}
|
||||
gitea.EXPECT().GetAssociatedPrjGitPR("test", "prjcopy", "test", "testRepo", int64(1)).Return(nil, nil)
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
|
||||
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
|
||||
gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
|
||||
gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, nil)
|
||||
gitea.EXPECT().GetPullRequestReviews("test", "testRepo", int64(0)).Return([]*models.PullReview{}, nil)
|
||||
|
||||
gitea.EXPECT().FetchMaintainershipDirFile("test", "prjcopy", "branch", "_project").Return(nil, "", repository.NewRepoGetRawFileNotFound())
|
||||
gitea.EXPECT().FetchMaintainershipFile("test", "prjcopy", "branch").Return(nil, "", repository.NewRepoGetRawFileNotFound())
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
if err != nil {
|
||||
t.Error("error:", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Cannot create prjgit repository", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = gitea
|
||||
|
||||
event.Repository.Name = "testRepo"
|
||||
config.GitProjectName = "prjcopy"
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
setupGitForTests(t, git)
|
||||
failedErr := errors.New("Returned error here")
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(nil, failedErr)
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
if err != failedErr {
|
||||
t.Error("error:", err)
|
||||
}
|
||||
})
|
||||
t.Run("Cannot create PR", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = gitea
|
||||
|
||||
event.Repository.Name = "testRepo"
|
||||
config.GitProjectName = "prjcopy"
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
setupGitForTests(t, git)
|
||||
prjgit := &models.Repository{
|
||||
SSHURL: "./prj",
|
||||
DefaultBranch: "testing",
|
||||
}
|
||||
failedErr := errors.New("Returned error here")
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
|
||||
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr)
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
if err != failedErr {
|
||||
t.Error("error:", err)
|
||||
}
|
||||
})
|
||||
t.Run("Open PrjGit PR", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = gitea
|
||||
|
||||
event.Repository.Name = "testRepo"
|
||||
config.GitProjectName = "prjcopy"
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
setupGitForTests(t, git)
|
||||
prjgit := &models.Repository{
|
||||
Name: "SomeRepo",
|
||||
Owner: &models.User{
|
||||
UserName: "org",
|
||||
},
|
||||
SSHURL: "./prj",
|
||||
DefaultBranch: "testing",
|
||||
}
|
||||
giteaPR := &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Repo: prjgit,
|
||||
},
|
||||
Index: 13,
|
||||
User: &models.User{
|
||||
UserName: "test",
|
||||
},
|
||||
}
|
||||
failedErr := errors.New("Returned error here")
|
||||
gitea.EXPECT().GetAssociatedPrjGitPR("test", "prjcopy", "test", "testRepo", int64(1)).Return(nil, nil)
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
|
||||
gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
|
||||
gitea.EXPECT().GetPullRequestReviews("org", "SomeRepo", int64(13)).Return([]*models.PullReview{}, nil)
|
||||
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
|
||||
gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, failedErr)
|
||||
|
||||
gitea.EXPECT().FetchMaintainershipDirFile("test", "prjcopy", "branch", "_project").Return(nil, "", repository.NewRepoGetRawFileNotFound())
|
||||
gitea.EXPECT().FetchMaintainershipFile("test", "prjcopy", "branch").Return(nil, "", repository.NewRepoGetRawFileNotFound())
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
if errors.Unwrap(err) != failedErr {
|
||||
t.Error("error:", err)
|
||||
}
|
||||
})
|
||||
}
|
25
workflow-pr/pr_processor_reviewed.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package main
|
||||
|
||||
import "src.opensuse.org/autogits/common"
|
||||
|
||||
type PullRequestReviewed struct {
|
||||
gitea common.Gitea
|
||||
}
|
||||
|
||||
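// Process re-evaluates the review state of the whole PR set and merges it with the
// configured git author once the maintainers have approved it.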
func (o *PullRequestReviewed) Process(req *common.PullRequestWebhookEvent, git common.Git, config *common.AutogitConfig) error {
|
||||
prset, err := common.FetchPRSet(o.gitea, req.Repository.Owner.Username, req.Repository.Name, req.Number, config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
maintainers, err := common.FetchProjectMaintainershipData(o.gitea, prset.Config.Organization, prset.Config.GitProjectName, prset.Config.Branch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if prset.IsApproved(o.gitea, maintainers) {
|
||||
prset.Merge(GitAuthor, GitEmail)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
63
workflow-pr/pr_processor_reviewed_test.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
)
|
||||
/*
|
||||
func TestPRReviewed(t *testing.T) {
|
||||
testData := []struct {
|
||||
title string
|
||||
error error
|
||||
}{
|
||||
{
|
||||
title: "forward project review",
|
||||
},
|
||||
}
|
||||
|
||||
event := &common.PullRequestWebhookEvent{
|
||||
Action: "reviewed",
|
||||
Number: 1,
|
||||
Pull_Request: &common.PullRequest{
|
||||
Id: 1,
|
||||
Base: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
Head: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "testing",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
},
|
||||
Repository: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Owner: &common.Organization{
|
||||
Username: "test",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testData {
|
||||
t.Run(test.title, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGitea(ctl)
|
||||
|
||||
s := PullRequestReviewed{
|
||||
gitea: mock,
|
||||
}
|
||||
|
||||
mock.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(nil, nil)
|
||||
|
||||
if err := s.Process(event, nil, nil); err != test.error {
|
||||
t.Error("unexected error:", err, "Expected:", test.error)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
*/
|
83
workflow-pr/pr_processor_sync.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"log"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func prGitBranchNameForPR(req *common.PullRequestWebhookEvent) string {
|
||||
return fmt.Sprintf("PR_%s#%d", req.Repository.Name, req.Pull_Request.Number)
|
||||
}
|
||||
|
||||
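// updateSubmoduleInPR checks out the package submodule inside the PrjGit checkout
// and moves it to the head commit of the package pull request.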
func updateSubmoduleInPR(req *common.PullRequestWebhookEvent, git common.Git) {
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "submodule", "update", "--init", "--checkout", "--depth", "1", req.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(path.Join(common.DefaultGitPrj, req.Repository.Name), "fetch", "--depth", "1", "origin", req.Pull_Request.Head.Sha))
|
||||
common.PanicOnError(git.GitExec(path.Join(common.DefaultGitPrj, req.Repository.Name), "checkout", req.Pull_Request.Head.Sha))
|
||||
}
|
||||
|
||||
func processPrjGitPullRequestSync(req *common.PullRequestWebhookEvent) error {
|
||||
// req := h.Data.(*common.PullRequestAction)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type PullRequestSynced struct {
|
||||
gitea common.Gitea
|
||||
}
|
||||
|
||||
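// Process keeps the PrjGit pull request in sync with its package pull request: when
// the submodule recorded in the PrjGit PR no longer matches the package PR head, it
// creates a new commit on the PR branch pointing at the updated head.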
func (o *PullRequestSynced) Process(req *common.PullRequestWebhookEvent, git common.Git, config *common.AutogitConfig) error {
|
||||
if req.Repository.Name == config.GitProjectName {
|
||||
return processPrjGitPullRequestSync(req)
|
||||
}
|
||||
|
||||
// verify that the submodule in the PrjGit PR
// is still pointing to the HEAD of this PR
|
||||
pr, err := o.gitea.GetPullRequest(req.Repository.Owner.Username, req.Repository.Name, req.Number)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot fetch PR data from gitea: %w", err)
|
||||
}
|
||||
|
||||
_, prs := common.ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
|
||||
if len(prs) != 1 {
|
||||
return fmt.Errorf("Package update associated with invalid number of projects. Expected 1. Got %d", len(prs))
|
||||
}
|
||||
|
||||
prjPr, err := o.gitea.GetPullRequest(prs[0].Org, prs[0].Repo, prs[0].Num)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot get PrjGit PR in processPullRequestSync. Err: %w", err)
|
||||
}
|
||||
|
||||
common.PanicOnError(git.GitExec("", "clone", "--branch", prjPr.Head.Name, "--depth", "1", prjPr.Head.Repo.SSHURL, common.DefaultGitPrj))
|
||||
commitId, ok := git.GitSubmoduleCommitId(common.DefaultGitPrj, req.Repository.Name, prjPr.Head.Sha)
|
||||
|
||||
if !ok {
|
||||
return fmt.Errorf("Cannot fetch submodule commit id in prjgit for '%s'", req.Repository.Name)
|
||||
}
|
||||
|
||||
// nothing changed, still in sync
|
||||
if commitId == req.Pull_Request.Head.Sha {
|
||||
log.Println("commitID already match - nothing to do")
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("different ids: '%s' vs. '%s'\n", req.Pull_Request.Head.Sha, commitId)
|
||||
|
||||
commitMsg := fmt.Sprintf(`Sync PR
|
||||
|
||||
Update to %s`, req.Pull_Request.Head.Sha)
|
||||
|
||||
log.Println("will create new commit msg:", commitMsg)
|
||||
|
||||
// we need to update prjgit PR with the new head hash
|
||||
branchName := prGitBranchNameForPR(req)
|
||||
updateSubmoduleInPR(req, git)
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", commitMsg))
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", "origin", "+HEAD:"+branchName))
|
||||
return nil
|
||||
}
|
235
workflow-pr/pr_processor_sync_test.go
Normal file
@@ -0,0 +1,235 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestSyncPR(t *testing.T) {
|
||||
pr := PullRequestSynced{}
|
||||
|
||||
config := &common.AutogitConfig{
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
Branch: "testing",
|
||||
Organization: "test",
|
||||
GitProjectName: "prj",
|
||||
}
|
||||
|
||||
event := &common.PullRequestWebhookEvent{
|
||||
Action: "syncronized",
|
||||
Number: 42,
|
||||
Pull_Request: &common.PullRequest{
|
||||
Number: 42,
|
||||
Base: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "8a6a69a4232cabda04a4d9563030aa888ff5482f75aa4c6519da32a951a072e2",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Owner: &common.Organization{
|
||||
Username: config.Organization,
|
||||
},
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
Head: common.Head{
|
||||
Ref: "branch",
|
||||
Sha: "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
Default_Branch: "main1",
|
||||
},
|
||||
},
|
||||
},
|
||||
Repository: &common.Repository{
|
||||
Owner: &common.Organization{
|
||||
Username: config.Organization,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
modelPR := &models.PullRequest{
|
||||
Index: 42,
|
||||
Body: "PR: test/prj#24",
|
||||
Base: &models.PRBranchInfo{
|
||||
Ref: "branch",
|
||||
Sha: "8a6a69a4232cabda04a4d9563030aa888ff5482f75aa4c6519da32a951a072e2",
|
||||
Repo: &models.Repository{
|
||||
Name: "testRepo",
|
||||
Owner: &models.User{
|
||||
UserName: "test",
|
||||
},
|
||||
DefaultBranch: "main1",
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Ref: "branch",
|
||||
Sha: "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985",
|
||||
Repo: &models.Repository{
|
||||
Name: "testRepo",
|
||||
Owner: &models.User{
|
||||
UserName: "test",
|
||||
},
|
||||
DefaultBranch: "main1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
PrjGitPR := &models.PullRequest{
|
||||
Title: "some pull request",
|
||||
Body: "PR: test/testRepo#42",
|
||||
Index: 24,
|
||||
Head: &models.PRBranchInfo{
|
||||
Name: "testing",
|
||||
Sha: "db8adab91edb476b9762097d10c6379aa71efd6b60933a1c0e355ddacf419a95",
|
||||
Repo: &models.Repository{
|
||||
SSHURL: "./prj",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
GitCommiter: "tester",
|
||||
GitEmail: "test@suse.com",
|
||||
}
|
||||
|
||||
t.Run("PR sync request against PrjGit == no action", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr.gitea = mock_common.NewMockGitea(ctl)
|
||||
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "testRepo"
|
||||
event.Repository.Name = "testRepo"
|
||||
|
||||
if err := pr.Process(event, git, config); err != nil {
|
||||
t.Error("Error PrjGit sync request. Should be no error.", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Missing submodule in prjgit", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = mock
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "prjGit"
|
||||
event.Repository.Name = "testRepo"
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
oldSha := PrjGitPR.Head.Sha
|
||||
defer func() { PrjGitPR.Head.Sha = oldSha }()
|
||||
PrjGitPR.Head.Sha = "ab8adab91edb476b9762097d10c6379aa71efd6b60933a1c0e355ddacf419a95"
|
||||
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
|
||||
if err == nil || err.Error() != "Cannot fetch submodule commit id in prjgit for 'testRepo'" {
|
||||
t.Error("Invalid error received.", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Missing PrjGit PR for the sync", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = mock
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "prjGit"
|
||||
event.Repository.Name = "tester"
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
expectedErr := errors.New("Missing PR should throw error")
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "tester", event.Pull_Request.Number).Return(modelPR, expectedErr)
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
|
||||
if err == nil || errors.Unwrap(err) != expectedErr {
|
||||
t.Error("Invalid error received.", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("PR sync", func(t *testing.T) {
|
||||
var b bytes.Buffer
|
||||
w := log.Writer()
|
||||
log.SetOutput(&b)
|
||||
defer log.SetOutput(w)
|
||||
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGitea(ctl)
|
||||
|
||||
pr.gitea = mock
|
||||
git.GitPath = t.TempDir()
|
||||
|
||||
config.GitProjectName = "prjGit"
|
||||
event.Repository.Name = "testRepo"
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
git.DebugLogger = true
|
||||
DebugMode = true
|
||||
// mock.EXPECT().GetAssociatedPrjGitPR(event).Return(PrjGitPR, nil)
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)
|
||||
|
||||
err := pr.Process(event, git, config)
|
||||
|
||||
if err != nil {
|
||||
t.Error("Invalid error received.", err)
|
||||
t.Error(b.String())
|
||||
}
|
||||
|
||||
// check that we actually created the branch in the prjgit
|
||||
id, ok := git.GitSubmoduleCommitId("prj", "testRepo", "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c")
|
||||
if id != "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985" || !ok {
|
||||
t.Error("Failed creating PR")
|
||||
t.Error(b.String())
|
||||
}
|
||||
|
||||
/*
|
||||
* does nothing on next sync of already synced data -- PR is updated
|
||||
*/
|
||||
os.RemoveAll(path.Join(git.GitPath, common.DefaultGitPrj))
|
||||
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
|
||||
mock.EXPECT().GetPullRequest(config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)
|
||||
err = pr.Process(event, git, config)
|
||||
|
||||
if err != nil {
|
||||
t.Error("Invalid error received.", err)
|
||||
t.Error(b.String())
|
||||
}
|
||||
|
||||
// check that we actually created the branch in the prjgit
|
||||
id, ok = git.GitSubmoduleCommitId("prj", "testRepo", "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c")
|
||||
if id != "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985" || !ok {
|
||||
t.Error("Failed creating PR")
|
||||
t.Error(b.String())
|
||||
}
|
||||
|
||||
if id, err := git.GitBranchHead("prj", "PR_testRepo#42"); id != "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c" || err != nil {
|
||||
t.Error("no branch?", err)
|
||||
t.Error(b.String())
|
||||
}
|
||||
|
||||
if !strings.Contains(b.String(), "commitID already match - nothing to do") {
|
||||
// os.CopyFS("/tmp/test", os.DirFS(git.GitPath))
|
||||
t.Log(b.String())
|
||||
}
|
||||
})
|
||||
}
|
215
workflow-pr/pr_processor_test.go
Normal file
@@ -0,0 +1,215 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
mock_main "src.opensuse.org/workflow-pr/mock"
|
||||
)
|
||||
|
||||
func TestPRProcessor(t *testing.T) {
|
||||
tests := []struct {
|
||||
title string
|
||||
action string
|
||||
req func(req *RequestProcessor, mock PullRequestProcessor)
|
||||
}{
|
||||
{
|
||||
title: "Open routine called for PR opening",
|
||||
action: "opened",
|
||||
req: func(req *RequestProcessor, mock PullRequestProcessor) {
|
||||
req.Opened = mock
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Re-Open routine called for PR reopening",
|
||||
action: "reopened",
|
||||
req: func(req *RequestProcessor, mock PullRequestProcessor) {
|
||||
req.Opened = mock
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Sync routine called for PR sync requests",
|
||||
action: "synchronized",
|
||||
req: func(req *RequestProcessor, mock PullRequestProcessor) {
|
||||
req.Synced = mock
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Close routine called for PR closing",
|
||||
action: "closed",
|
||||
req: func(req *RequestProcessor, mock PullRequestProcessor) {
|
||||
req.Closed = mock
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Close routine called for PR closing",
|
||||
action: "reviewed",
|
||||
req: func(req *RequestProcessor, mock PullRequestProcessor) {
|
||||
req.Review = mock
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var logBuf bytes.Buffer
|
||||
oldOut := log.Writer()
|
||||
log.SetOutput(&logBuf)
|
||||
defer log.SetOutput(oldOut)
|
||||
|
||||
testConfiguration := make(map[string][]*common.AutogitConfig)
|
||||
|
||||
testConfiguration["test"] = make([]*common.AutogitConfig, 1, 1)
|
||||
testConfiguration["test"][0] = &common.AutogitConfig{
|
||||
Branch: "branch",
|
||||
}
|
||||
|
||||
event := &common.PullRequestWebhookEvent{
|
||||
// Action: "opened",
|
||||
Number: 1,
|
||||
Pull_Request: &common.PullRequest{
|
||||
Id: 1,
|
||||
Base: common.Head{
|
||||
Ref: "branch",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
},
|
||||
},
|
||||
Head: common.Head{
|
||||
Ref: "branch",
|
||||
Repo: &common.Repository{
|
||||
Name: "testRepo",
|
||||
},
|
||||
},
|
||||
},
|
||||
Repository: &common.Repository{
|
||||
Owner: &common.Organization{
|
||||
Username: "test",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.title, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_main.NewMockPullRequestProcessor(ctl)
|
||||
mock.EXPECT().Process(event, gomock.Any(), testConfiguration["test"][0]).Return(nil)
|
||||
|
||||
req := &RequestProcessor{
|
||||
configuredRepos: testConfiguration,
|
||||
git: &common.GitHandlerGeneratorImpl{},
|
||||
}
|
||||
test.req(req, mock)
|
||||
|
||||
event.Action = test.action
|
||||
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
t.Error("Error processing open PR:", err)
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
req := &RequestProcessor{
|
||||
configuredRepos: testConfiguration,
|
||||
git: &common.GitHandlerGeneratorImpl{},
|
||||
}
|
||||
|
||||
t.Run("Edit PR handling", func(t *testing.T) {
|
||||
/* ctl := gomock.NewController(t)
|
||||
closedMock := mock_main.NewMockPullRequestProcessor(ctl)
|
||||
closedMock.EXPECT().Process(event, gomock.Any(), testConfiguration["test"][0]).Return(nil)
|
||||
*/
|
||||
|
||||
// req.Closed = closedMock
|
||||
event.Action = "edited"
|
||||
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
t.Error("Error processing edit PR:", err)
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Unknown PR-type handling", func(t *testing.T) {
|
||||
event.Action = "not existing action"
|
||||
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
|
||||
if err == nil {
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Missing branch in config present in PR", func(t *testing.T) {
|
||||
baseRef := event.Pull_Request.Base.Ref
|
||||
event.Pull_Request.Base.Ref = "not present"
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
event.Pull_Request.Base.Ref = baseRef
|
||||
|
||||
if err == nil {
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Invalid data present in PR", func(t *testing.T) {
|
||||
baseConfig := req.configuredRepos
|
||||
req.configuredRepos = make(map[string][]*common.AutogitConfig)
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: nil,
|
||||
})
|
||||
req.configuredRepos = baseConfig
|
||||
|
||||
if err == nil {
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Ignoring requests against unconfigured repos", func(t *testing.T) {
|
||||
baseConfig := req.configuredRepos
|
||||
req.configuredRepos = make(map[string][]*common.AutogitConfig)
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
req.configuredRepos = baseConfig
|
||||
|
||||
if err != nil {
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Failures of git handler creation", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitHandler := mock_common.NewMockGitHandlerGenerator(ctl)
|
||||
|
||||
gitHandler.EXPECT().CreateGitHandler(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fmt.Errorf("some error"))
|
||||
|
||||
origHandler := req.git
|
||||
req.git = gitHandler
|
||||
|
||||
err := req.ProcessFunc(&common.Request{
|
||||
Data: event,
|
||||
})
|
||||
|
||||
req.git = origHandler
|
||||
|
||||
if err == nil {
|
||||
t.Error(logBuf.String())
|
||||
}
|
||||
})
|
||||
}
|
204
workflow-pr/repo_check.go
Normal file
@@ -0,0 +1,204 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"math/rand"
|
||||
"path"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=repo_check.go -destination=mock/repo_check.go -typed
|
||||
|
||||
type StateChecker interface {
|
||||
VerifyProjectState(orgName string, configs []*common.AutogitConfig, idx int) error
|
||||
CheckRepos() error
|
||||
ConsistencyCheckProcess() error
|
||||
}
|
||||
|
||||
type DefaultStateChecker struct {
|
||||
exitCheckLoop bool
|
||||
checkOnStart bool
|
||||
checkInterval time.Duration
|
||||
|
||||
gitea common.Gitea
|
||||
git common.GitHandlerGenerator
|
||||
processor *RequestProcessor
|
||||
i StateChecker
|
||||
}
|
||||
|
||||
func CreateDefaultStateChecker(checkOnStart bool, processor *RequestProcessor, gitea common.Gitea, interval time.Duration) *DefaultStateChecker {
|
||||
var s = &DefaultStateChecker{
|
||||
git: &common.GitHandlerGeneratorImpl{},
|
||||
gitea: gitea,
|
||||
checkInterval: interval,
|
||||
checkOnStart: checkOnStart,
|
||||
processor: processor,
|
||||
}
|
||||
s.i = s
|
||||
return s
|
||||
}
|
||||
|
||||
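// VerifyProjectState re-validates a single project git: it clones the project,
// replays recent open/closed pull requests for every submodule and fast-forwards the
// configured branch of a package git when the project git references newer commits.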
func (s *DefaultStateChecker) VerifyProjectState(org string, configs []*common.AutogitConfig, idx int) error {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Println("panic caught")
|
||||
if err, ok := r.(error); ok {
log.Println(err)
} else {
log.Println(r)
}
|
||||
log.Println(string(debug.Stack()))
|
||||
}
|
||||
}()
|
||||
|
||||
git, err := s.git.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot create git handler: %w", err)
|
||||
}
|
||||
|
||||
config := configs[idx]
|
||||
repo, err := s.gitea.CreateRepositoryIfNotExist(git, org, config.GitProjectName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching or creating '%s/%s' -- aborting verifyProjectState(). Err: %w", org, config.GitProjectName, err)
|
||||
}
|
||||
|
||||
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", repo.SSHURL, config.GitProjectName))
|
||||
log.Println("getting submodule list")
|
||||
submodules, err := git.GitSubmoduleList(config.GitProjectName, "HEAD")
|
||||
|
||||
nextSubmodule:
|
||||
for sub, commitID := range submodules {
|
||||
log.Println(" + checking", sub, commitID)
|
||||
submoduleName := sub
|
||||
if n := strings.LastIndex(sub, "/"); n != -1 {
|
||||
submoduleName = sub[n+1:]
|
||||
}
|
||||
|
||||
// check if open PRs have an associated PR against the project git
|
||||
prs, err := s.gitea.GetRecentPullRequests(config.Organization, submoduleName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching pull requests for %s/%s. Err: %w", config.Organization, submoduleName, err)
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
log.Println(" - # of PRs to check:", len(prs))
|
||||
}
|
||||
|
||||
for _, pr := range prs {
|
||||
var event common.PullRequestWebhookEvent
|
||||
|
||||
event.Pull_Request = common.PullRequestFromModel(pr)
|
||||
event.Action = string(pr.State)
|
||||
event.Number = pr.Index
|
||||
event.Repository = common.RepositoryFromModel(pr.Base.Repo)
|
||||
event.Sender = *common.UserFromModel(pr.User)
|
||||
event.Requested_reviewer = nil
|
||||
|
||||
git, err := s.git.CreateGitHandler(GitAuthor, GitEmail, AppName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error allocating GitHandler. Err: %w", err)
|
||||
}
|
||||
if !DebugMode {
|
||||
defer git.Close()
|
||||
}
|
||||
|
||||
switch pr.State {
|
||||
case "open":
|
||||
err = s.processor.Opened.Process(&event, git, config)
|
||||
case "closed":
|
||||
err = s.processor.Closed.Process(&event, git, config)
|
||||
default:
|
||||
return fmt.Errorf("Unhandled pull request state: '%s'. %s/%s/%d", pr.State, config.Organization, submoduleName, pr.Index)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
log.Println(" * processor error returned:", err)
|
||||
}
|
||||
}
|
||||
|
||||
// check if the committed changes are synced with the branches
|
||||
commits, err := s.gitea.GetRecentCommits(config.Organization, submoduleName, config.Branch, 10)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching recent commits for %s/%s. Err: %w", config.Organization, submoduleName, err)
|
||||
}
|
||||
|
||||
for idx, commit := range commits {
|
||||
if commit.SHA == commitID {
|
||||
if idx != 0 {
|
||||
// commit in past ...
|
||||
log.Println(" W -", submoduleName, " is behind the branch by", idx, "This should not happen in PR workflow alone")
|
||||
}
|
||||
continue nextSubmodule
|
||||
}
|
||||
}
|
||||
|
||||
// not found in recent history; check whether we should advance the branch label -- pull the submodule first
|
||||
git.GitExecOrPanic(config.GitProjectName, "submodule", "update", "--init", "--filter", "blob:none", "--", sub)
|
||||
subDir := path.Join(config.GitProjectName, sub)
|
||||
newCommits := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(subDir, "rev-list", "^origin/"+config.Branch, commitID), "\n")
|
||||
|
||||
if len(newCommits) >= 1 {
|
||||
if DebugMode {
|
||||
log.Println(" - updating branch", config.Branch, "to new head", commitID, " - len:", len(newCommits))
|
||||
}
|
||||
git.GitExecOrPanic(subDir, "checkout", "-B", config.Branch, commitID)
|
||||
url := git.GitExecWithOutputOrPanic(subDir, "remote", "get-url", "origin", "--push")
|
||||
sshUrl, err := common.TranslateHttpsToSshUrl(strings.TrimSpace(url))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot traslate HTTPS git URL to SSH_URL. %w", err)
|
||||
}
|
||||
git.GitExecOrPanic(subDir, "remote", "set-url", "origin", "--push", sshUrl)
|
||||
git.GitExecOrPanic(subDir, "push", "origin", config.Branch)
|
||||
}
|
||||
}
|
||||
|
||||
// forward any package gits referred to by the project git, but never move them backwards
|
||||
return nil
|
||||
}
|
||||
|
||||
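// CheckRepos runs VerifyProjectState for every configured organization and project,
// sleeping a randomized interval between checks and joining any returned errors.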
func (s *DefaultStateChecker) CheckRepos() error {
|
||||
errorList := make([]error, 0, 10)
|
||||
|
||||
for org, configs := range s.processor.configuredRepos {
|
||||
for configIdx, config := range configs {
|
||||
if s.checkInterval > 0 {
|
||||
sleepInterval := (s.checkInterval - s.checkInterval/2) + time.Duration(rand.Int63n(int64(s.checkInterval)))
|
||||
log.Println(" - sleep interval", sleepInterval, "until next check")
|
||||
time.Sleep(sleepInterval)
|
||||
}
|
||||
|
||||
log.Printf(" ++ starting verification, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
if err := s.i.VerifyProjectState(org, configs, configIdx); err != nil {
|
||||
log.Printf(" *** verification failed, org: `%s`, err: %#v\n", org, err)
|
||||
errorList = append(errorList, err)
|
||||
}
|
||||
log.Printf(" ++ verification complete, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Join(errorList...)
|
||||
}
|
||||
|
||||
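// ConsistencyCheckProcess optionally performs an immediate startup check without
// delays and then keeps calling CheckRepos until exitCheckLoop is set.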
func (s *DefaultStateChecker) ConsistencyCheckProcess() error {
|
||||
if s.checkOnStart {
|
||||
savedCheckInterval := s.checkInterval
|
||||
s.checkInterval = 0
|
||||
log.Println("== Startup consistency check begin...")
|
||||
s.i.CheckRepos()
|
||||
log.Println("== Startup consistency check done...")
|
||||
s.checkInterval = savedCheckInterval
|
||||
}
|
||||
|
||||
for {
|
||||
if s.exitCheckLoop {
|
||||
break
|
||||
}
|
||||
s.i.CheckRepos()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
283
workflow-pr/repo_check_test.go
Normal file
283
workflow-pr/repo_check_test.go
Normal file
@@ -0,0 +1,283 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"log"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
mock_main "src.opensuse.org/workflow-pr/mock"
|
||||
)
|
||||
|
||||
func TestRepoCheck(t *testing.T) {
|
||||
var logBuf bytes.Buffer
|
||||
oldOut := log.Writer()
|
||||
log.SetOutput(&logBuf)
|
||||
defer log.SetOutput(oldOut)
|
||||
|
||||
t.Run("Consistency Check On Start", func(t *testing.T) {
|
||||
c := CreateDefaultStateChecker(true, nil, nil, 100)
|
||||
ctl := gomock.NewController(t)
|
||||
state := mock_main.NewMockStateChecker(ctl)
|
||||
c.i = state
|
||||
state.EXPECT().CheckRepos().Do(func() error {
|
||||
// only checkOnStart has checkInterval = 0
|
||||
if c.checkInterval != 0 {
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
c.exitCheckLoop = true
|
||||
return nil
|
||||
})
|
||||
|
||||
c.ConsistencyCheckProcess()
|
||||
if c.checkInterval != 100 {
|
||||
t.Fail()
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("No consistency Check On Start", func(t *testing.T) {
|
||||
c := CreateDefaultStateChecker(true, nil, nil, 100)
|
||||
ctl := gomock.NewController(t)
|
||||
state := mock_main.NewMockStateChecker(ctl)
|
||||
c.i = state
|
||||
|
||||
nCalls := 10
|
||||
state.EXPECT().CheckRepos().Do(func() error {
|
||||
// only checkOnStart has checkInterval = 0
|
||||
if c.checkInterval != 100 {
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
nCalls--
|
||||
if nCalls == 0 {
|
||||
c.exitCheckLoop = true
|
||||
}
|
||||
return nil
|
||||
}).Times(nCalls)
|
||||
c.checkOnStart = false
|
||||
|
||||
c.ConsistencyCheckProcess()
|
||||
})
|
||||
|
||||
t.Run("CheckRepos() calls CheckProjectState() for each project", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
state := mock_main.NewMockStateChecker(ctl)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
config1 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo1",
|
||||
Organization: "repo1_org",
|
||||
}
|
||||
config2 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo2",
|
||||
Organization: "repo2_org",
|
||||
}
|
||||
config3 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo3",
|
||||
Organization: "repo3_org",
|
||||
}
|
||||
|
||||
configs := &RequestProcessor{
|
||||
configuredRepos: map[string][]*common.AutogitConfig{
|
||||
"repo1_org": []*common.AutogitConfig{config1},
|
||||
"repo2_org": []*common.AutogitConfig{config2},
|
||||
"repo3_org": []*common.AutogitConfig{config3},
|
||||
},
|
||||
}
|
||||
r := configs.configuredRepos
|
||||
|
||||
c := CreateDefaultStateChecker(true, configs, gitea, 100)
|
||||
c.i = state
|
||||
|
||||
state.EXPECT().VerifyProjectState("repo1_org", r["repo1_org"], 0)
|
||||
state.EXPECT().VerifyProjectState("repo2_org", r["repo2_org"], 0)
|
||||
state.EXPECT().VerifyProjectState("repo3_org", r["repo3_org"], 0)
|
||||
|
||||
if err := c.CheckRepos(); err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("CheckRepos errors", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
state := mock_main.NewMockStateChecker(ctl)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
git := mock_common.NewMockGitHandlerGenerator(ctl)
|
||||
|
||||
config1 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo1",
|
||||
Organization: "repo1_org",
|
||||
}
|
||||
|
||||
configs := &RequestProcessor{
|
||||
configuredRepos: map[string][]*common.AutogitConfig{
|
||||
"repo1_org": []*common.AutogitConfig{config1},
|
||||
},
|
||||
}
|
||||
//r := configs.configuredRepos
|
||||
|
||||
c := CreateDefaultStateChecker(true, configs, gitea, 100)
|
||||
c.i = state
|
||||
c.git = git
|
||||
|
||||
err := errors.New("test error")
|
||||
state.EXPECT().VerifyProjectState("repo1_org", gomock.Any(), 0).Return(err)
|
||||
|
||||
r := c.CheckRepos()
|
||||
|
||||
if !errors.Is(r, err) {
|
||||
t.Error(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
type testGit struct {
	git *common.GitHandlerImpl
}

func (s *testGit) CreateGitHandler(a, b, c string) (common.Git, error) {
	return s.git, nil
}

func (s *testGit) ReadExistingPath(a, b, c string) (common.Git, error) {
	return nil, errors.New("should not be called")
}

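// TestVerifyProjectState exercises VerifyProjectState() against a throw-away
// git repository (prepared by setupGitForTests) while the Gitea API is mocked;
// log output is redirected to a buffer to keep the test output quiet.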
func TestVerifyProjectState(t *testing.T) {
	var logBuf bytes.Buffer
	oldOut := log.Writer()
	log.SetOutput(&logBuf)
	defer log.SetOutput(oldOut)

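	// Gitea reports no pull requests and no new commits, so the check should
	// finish without invoking any PR processor and return nil.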
t.Run("Project state with no PRs", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
DebugLogger: true,
|
||||
GitCommiter: "TestCommiter",
|
||||
GitEmail: "test@testing",
|
||||
GitPath: t.TempDir(),
|
||||
}
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
org := "repo1_org"
|
||||
config1 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo1",
|
||||
Organization: "repo1_org",
|
||||
Branch: "testing",
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
Workflows: []string{"pr"},
|
||||
}
|
||||
configs := &RequestProcessor{
|
||||
configuredRepos: map[string][]*common.AutogitConfig{
|
||||
org: []*common.AutogitConfig{config1},
|
||||
},
|
||||
}
|
||||
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), config1.GitProjectName).Return(&models.Repository{
|
||||
SSHURL: "./prj",
|
||||
}, nil)
|
||||
gitea.EXPECT().GetRecentPullRequests(org, "testRepo")
|
||||
gitea.EXPECT().GetRecentCommits(org, "testRepo", "testing", gomock.Any())
|
||||
|
||||
c := CreateDefaultStateChecker(false, configs, gitea, 0)
|
||||
c.git = &testGit{
|
||||
git: git,
|
||||
}
|
||||
|
||||
err := c.VerifyProjectState("repo1_org", configs.configuredRepos[org], 0)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
})
|
||||
|
||||
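	// Gitea returns a single open PR; the Opened workflow processor should be
	// invoked for it exactly once, and no further repository updates are expected.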
t.Run("Project state with 1 PRs that doesn't trigger updates", func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGitea(ctl)
|
||||
process := mock_main.NewMockPullRequestProcessor(ctl)
|
||||
|
||||
git := &common.GitHandlerImpl{
|
||||
DebugLogger: true,
|
||||
GitCommiter: "TestCommiter",
|
||||
GitEmail: "test@testing",
|
||||
GitPath: t.TempDir(),
|
||||
}
|
||||
|
||||
setupGitForTests(t, git)
|
||||
|
||||
org := "repo1_org"
|
||||
config1 := &common.AutogitConfig{
|
||||
GitProjectName: "git_repo1",
|
||||
Organization: "repo1_org",
|
||||
Branch: "testing",
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
Workflows: []string{"pr"},
|
||||
}
|
||||
configs := &RequestProcessor{
|
||||
configuredRepos: map[string][]*common.AutogitConfig{
|
||||
org: []*common.AutogitConfig{config1},
|
||||
},
|
||||
}
|
||||
|
||||
gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), config1.GitProjectName).Return(&models.Repository{
|
||||
SSHURL: "./prj",
|
||||
}, nil)
|
||||
|
||||
gitea.EXPECT().GetRecentPullRequests(org, "testRepo").Return([]*models.PullRequest{
|
||||
&models.PullRequest{
|
||||
ID: 1234,
|
||||
URL: "url here",
|
||||
Index: 1234,
|
||||
State: "open",
|
||||
Labels: []*models.Label{
|
||||
&models.Label{
|
||||
ID: 1,
|
||||
},
|
||||
},
|
||||
User: &models.User{},
|
||||
|
||||
Base: &models.PRBranchInfo {
|
||||
Name: "one",
|
||||
Ref: "main",
|
||||
Sha: "123",
|
||||
Repo: &models.Repository {
|
||||
Owner: &models.User {
|
||||
},
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo {
|
||||
Name: "one",
|
||||
Ref: "main",
|
||||
Sha: "123",
|
||||
Repo: &models.Repository {
|
||||
Owner: &models.User {
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}, nil)
|
||||
|
||||
gitea.EXPECT().GetRecentCommits(org, "testRepo", "testing", gomock.Any())
|
||||
|
||||
c := CreateDefaultStateChecker(false, configs, gitea, 0)
|
||||
c.git = &testGit{
|
||||
git: git,
|
||||
}
|
||||
process.EXPECT().Process(gomock.Any(), gomock.Any(), gomock.Any())
|
||||
c.processor.Opened = process
|
||||
|
||||
err := c.VerifyProjectState("repo1_org", configs.configuredRepos[org], 0)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
})
|
||||
}