forked from adamm/autogits

Compare commits: refactor ... status_ser

22 Commits:
e8b6066bae
42e2713cd8
3d24dce5c0
0cefb45d8a
ddbb824006
69dac4ec31
b7e03ab465
76aec3aabb
19fb7e277b
51261f1bc1
949810709d
c012570e89
44a3b15a7d
c5db1c83a7
9f0909621b
b3914b04bd
b43a19189e
01b665230e
1a07d4c541
22e44dff47
e5d07f0ce6
df9478a920
@@ -59,6 +59,7 @@ type AutogitConfig struct {
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories

ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
@@ -769,6 +769,8 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}

LogDebug("Getting submodules for:", commitId)

go func() {
defer done.Unlock()
defer close(data_out.ch)
@@ -916,6 +918,16 @@ type GitStatusData struct {
Path string
Status int
States [3]string

/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}

func parseGitStatusHexString(data io.ByteReader) (string, error) {
@@ -938,6 +950,20 @@ func parseGitStatusHexString(data io.ByteReader) (string, error) {
}
}
func parseGitStatusString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}

func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
@@ -978,7 +1004,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -988,11 +1014,11 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGitStatusString(data)
ret.States[0], err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -1002,7 +1028,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -1012,15 +1038,22 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 7); err != nil {
if err = skipGitStatusEntry(data, 2); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}

if ret.States[0], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
@@ -1031,7 +1064,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
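The States and SubmoduleChanges fields above follow git's `status --porcelain=v2` output; the 4-character submodule column is documented in the comment block in this hunk. As an illustration only (this helper is hypothetical and not part of the change), the field can be decoded like this:

package main

import "fmt"

// describeSubmoduleField decodes the 4-character submodule column described
// above: "N..." for non-submodules, otherwise "S<c><m><u>".
func describeSubmoduleField(f string) string {
	if len(f) != 4 || f[0] != 'S' {
		return "not a submodule"
	}
	out := "submodule"
	if f[1] == 'C' {
		out += ", commit changed"
	}
	if f[2] == 'M' {
		out += ", tracked changes"
	}
	if f[3] == 'U' {
		out += ", untracked changes"
	}
	return out
}

func main() {
	fmt.Println(describeSubmoduleField("N...")) // not a submodule
	fmt.Println(describeSubmoduleField("SC..")) // submodule, commit changed
}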
@@ -555,6 +555,8 @@ func TestGitStatusParse(t *testing.T) {
Path: ".gitmodules",
Status: GitStatus_Unmerged,
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},

SubmoduleChanges: "N...",
},
},
},
@@ -603,14 +603,14 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName s

func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
prOptions := models.CreatePullRequestOption{
Base: repo.DefaultBranch,
Base: targetId,
Head: srcId,
Title: title,
Body: body,
}

if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(repo.DefaultBranch).WithHead(srcId),
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(targetId).WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil {
return pr.Payload, nil
@@ -718,20 +718,18 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
}

func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
limit := int64(20)
page := int64(1)
resCount := limit
resCount := 1

retData := []*models.TimelineComment{}

for resCount == limit {
for resCount > 0 {
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
issue.NewIssueGetCommentsAndTimelineParams().
WithOwner(org).
WithRepo(repo).
WithIndex(idx).
WithPage(&page).
WithLimit(&limit),
WithPage(&page),
gitea.transport.DefaultAuthentication,
)

@@ -739,11 +737,13 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
return nil, err
}

resCount = int64(len(res.Payload))
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
page++

retData = append(retData, res.Payload...)
}
LogDebug("total results:", len(retData))

slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))
common/manifest.go (new file, 56 lines)
@@ -0,0 +1,56 @@
package common

import (
"os"
"path"
"strings"

"gopkg.in/yaml.v3"
)

type Manifest struct {
Subdirectories []string
}

func (m *Manifest) SubdirForPackage(pkg string) string {
if m == nil {
return pkg
}

idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)

for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
}
}

if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
}
return pkg
}

func ReadManifestFile(filename string) (*Manifest, error) {
data, err := os.ReadFile(filename)
if err != nil {
return nil, err
}

return ParseManifestFile(data)
}

func ParseManifestFile(data []byte) (*Manifest, error) {
ret := &Manifest{}
err := yaml.Unmarshal(data, ret)
if err != nil {
return nil, err
}

return ret, nil
}
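As a hedged usage sketch (not part of this change, and it only restates what the tests below exercise): SubdirForPackage places a package under the subdirectory whose basename is the longest case-insensitive prefix of the package's base name, falling back to the package name itself.

package main

import (
	"fmt"
	"log"

	"src.opensuse.org/autogits/common"
)

func main() {
	// Example _manifest content; the subdirectory names are what
	// SubdirForPackage matches against.
	m, err := common.ParseManifestFile([]byte("subdirectories:\n - base\n - libs/boo\n"))
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(m.SubdirForPackage("Boost"))  // libs/boo/Boost ("boost" starts with "boo")
	fmt.Println(m.SubdirForPackage("NodeJS")) // NodeJS (no match, unchanged)
}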
common/manifest_test.go (new file, 56 lines)
@@ -0,0 +1,56 @@
package common_test

import (
"testing"

"src.opensuse.org/autogits/common"
)

func TestManifestSubdirAssignments(t *testing.T) {
tests := []struct {
Name string
ManifestContent string
Packages []string
ManifestLocations []string
}{
{
Name: "empty manifest",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
ManifestLocations: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
},
{
Name: "only few subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "b/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
},
}

for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
m, err := common.ParseManifestFile([]byte(test.ManifestContent))
if err != nil {
t.Fatal(err)
}

for i, pkg := range test.Packages {
expected := test.ManifestLocations[i]
if l := m.SubdirForPackage(pkg); l != expected {
t.Error("Expected:", expected, "but got:", l)
}
}
})
}
}
@@ -562,25 +562,44 @@ func (c *ObsClient) DeleteProject(project string) error {
}

return nil
}

func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)

if err != nil {
return nil, err
}

return res.Body, nil
}

type PackageBuildStatus struct {
Package string `xml:"package,attr"`
Code string `xml:"code,attr"`
Details string `xml:"details"`

LastUpdate int64
}

type BuildResult struct {
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`

Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`

LastUpdate int64
}

type Binary struct {
@@ -600,6 +619,7 @@ type BuildResultList struct {
Result []BuildResult `xml:"result"`

isLastBuild bool
LastUpdate int64
}

func (r *BuildResultList) GetPackageList() []string {
@@ -622,6 +642,48 @@ func (r *BuildResultList) GetPackageList() []string {
return pkgList
}

func packageSort(A, B PackageBuildStatus) int {
return strings.Compare(A.Package, B.Package)
}

func repoSort(A, B BuildResult) int {
eq := strings.Compare(A.Project, B.Project)
if eq == 0 {
eq = strings.Compare(A.Repository, B.Repository)
if eq == 0 {
eq = strings.Compare(A.Arch, B.Arch)
}
}
return eq
}

func (r *BuildResultList) MergePackageState(now int64, pkgState *BuildResultList) {
for _, nr := range pkgState.Result {
idx, found := slices.BinarySearchFunc(r.Result, nr, repoSort)
// not found, new repo?
if !found {
nr.LastUpdate = now
r.Result = slices.Insert(r.Result, idx, nr)
continue
}

// update current repo
repo := &r.Result[idx]

// update all the packages in the repo
for _, p := range nr.Status {
p.LastUpdate = now
idx, found := slices.BinarySearchFunc(repo.Status, p, packageSort)
if !found {
repo.Status = slices.Insert(repo.Status, idx, p)
continue
}

repo.Status[idx] = p
}
}
}

func (r *BuildResultList) BuildResultSummary() (success, finished bool) {
if r == nil {
return false, false
@@ -889,5 +951,11 @@ func (c *ObsClient) BuildStatusWithState(project string, opts *BuildResultOption
if ret != nil {
ret.isLastBuild = opts.LastBuild
}

slices.SortFunc(ret.Result, repoSort)
for _, r := range ret.Result {
slices.SortFunc(r.Status, packageSort)
}

return ret, err
}
common/pr.go (37 changed lines)
@@ -25,6 +25,13 @@ type PRSet struct {
BotUser string
}

func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
org = prinfo.PR.Base.Repo.Owner.UserName
repo = prinfo.PR.Base.Repo.Name
idx = prinfo.PR.Index
return
}

func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
for _, p := range currentSet {
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
@@ -121,27 +128,28 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
}, nil
}

func (rs *PRSet) Contains(pr *models.PullRequest) bool {
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
for _, p := range rs.PRs {
if p.PR.Base.RepoID == pr.Base.RepoID &&
p.PR.Head.Sha == pr.Head.Sha &&
p.PR.Base.Name == pr.Base.Name {
return true
return p, true
}
}

return false
return nil, false
}

func (rs *PRSet) AddPR(pr *models.PullRequest) error {
if rs.Contains(pr) {
return nil
func (rs *PRSet) AddPR(pr *models.PullRequest) *PRInfo {
if pr, found := rs.Find(pr); found {
return pr
}

rs.PRs = append(rs.PRs, &PRInfo{
prinfo := &PRInfo{
PR: pr,
})
return nil
}
rs.PRs = append(rs.PRs, prinfo)
return prinfo
}

func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
@@ -172,6 +180,15 @@ func (rs *PRSet) GetPrjGitPR() (*PRInfo, error) {
return nil, PRSet_PrjGitMissing
}

func (rs *PRSet) NeedRecreatingPrjGit(currentBranchHash string) bool {
pr, err := rs.GetPrjGitPR()
if err != nil {
return true
}

return pr.PR.Base.Sha == currentBranchHash
}

func (rs *PRSet) IsConsistent() bool {
prjpr_info, err := rs.GetPrjGitPR()
if err != nil {
@@ -302,7 +319,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pr.Reviews = r
if !pr.Reviews.IsManualMergeOK() {
LogInfo("Not approved manual merge")
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
return false
}
}
@@ -15,7 +15,23 @@ import (
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
/*
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}

t.Error(r)
}
*/
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
timeline := make([]*models.TimelineComment, len(reviews))
for idx, review := range reviews {
@@ -22,55 +22,32 @@ import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"

rabbitmq "github.com/rabbitmq/amqp091-go"
)

const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"

type RequestProcessor interface {
ProcessFunc(*Request) error
}

type ListenDefinitions struct {
type RabbitConnection struct {
RabbitURL *url.URL // amqps://user:password@host/queue

GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
queueName string
ch *rabbitmq.Channel

topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}

type RabbitProcessor interface {
GenerateTopics() []string

Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}

type RabbitMessage rabbitmq.Delivery

func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
func (l *RabbitConnection) ProcessTopicChanges() {
for {
topic, ok := <-l.topicSubChanges
if !ok {
@@ -80,11 +57,11 @@ func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
@@ -93,7 +70,7 @@ func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName
}
}

func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""

@@ -152,7 +129,7 @@ func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.processTopicChanges(ch, q.Name)
go l.ProcessTopicChanges()

for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
@@ -174,18 +151,18 @@ func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
}
}

func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.connectAndProcessRabbitMQ(ch)
go l.ConnectAndProcessRabbitMQ(ch)
}
}()

for {
err := l.processRabbitMQ(ch)
err := l.ProcessRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
@@ -194,49 +171,20 @@ func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
}
}

func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())

l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()

ch := make(chan RabbitMessage, 100)
go l.connectAndProcessRabbitMQ(ch)
go l.ConnectAndProcessRabbitMQ(ch)

return ch
}

func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()

if err := f.ProcessFunc(request); err != nil {
LogError(err)
}

}

func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}

for _, org := range l.Orgs {
for requestType, _ := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}

slices.Sort(topics)
return slices.Compact(topics)
}

func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()

j := 0
next_new_topic:
@@ -273,14 +221,8 @@ next_new_topic:
l.topics = newTopics
}

func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))

l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)

for {
msg, ok := <-ch
@@ -289,36 +231,8 @@ func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
}

LogDebug("event:", msg.RoutingKey)

route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]

if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}

LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)

}
}
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
}
}
}
common/rabbitmq_gitea.go (new file, 130 lines)
@@ -0,0 +1,130 @@
package common

/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/

import (
"fmt"
"runtime/debug"
"slices"
"strings"
)

const (
RequestType_CreateBrachTag = "create"
RequestType_DeleteBranchTag = "delete"
RequestType_Fork = "fork"
RequestType_Issue = "issues"
RequestType_IssueAssign = "issue_assign"
RequestType_IssueComment = "issue_comment"
RequestType_IssueLabel = "issue_label"
RequestType_IssueMilestone = "issue_milestone"
RequestType_Push = "push"
RequestType_Repository = "repository"
RequestType_Release = "release"
RequestType_PR = "pull_request"
RequestType_PRAssign = "pull_request_assign"
RequestType_PRLabel = "pull_request_label"
RequestType_PRComment = "pull_request_comment"
RequestType_PRMilestone = "pull_request_milestone"
RequestType_PRSync = "pull_request_sync"
RequestType_PRReviewAccepted = "pull_request_review_approved"
RequestType_PRReviewRejected = "pull_request_review_rejected"
RequestType_PRReviewRequest = "pull_request_review_request"
RequestType_PRReviewComment = "pull_request_review_comment"
RequestType_Wiki = "wiki"
)

type RequestProcessor interface {
ProcessFunc(*Request) error
}

type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string

c *RabbitConnection
}

func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}

func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}

for _, org := range gitea.Orgs {
for requestType, _ := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}

slices.Sort(topics)
return slices.Compact(topics)
}

func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]

if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}

LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
return nil
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}

return fmt.Errorf("Invalid routing key: %s", route)
}

func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()

if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
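A hedged wiring sketch for the refactored event loop (the PRHandler type, org name, and queue URL below are hypothetical; only the types and functions introduced in this change are assumed):

package main

import (
	"net/url"

	"src.opensuse.org/autogits/common"
)

// PRHandler is a hypothetical RequestProcessor implementation.
type PRHandler struct{}

func (h *PRHandler) ProcessFunc(req *common.Request) error {
	common.LogInfo("handling event of type", req.Type)
	return nil
}

func main() {
	processor := &common.RabbitMQGiteaEventsProcessor{
		Orgs: []string{"autogits"},
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: &PRHandler{},
		},
	}

	// Queue name comes from the URL path; credentials are filled in by the connection code.
	u, err := url.Parse("amqps://rabbit.opensuse.org/example-queue")
	if err != nil {
		common.LogError(err)
		return
	}
	processor.Connection().RabbitURL = u

	// Blocks and reconnects on errors; topics come from GenerateTopics().
	if err := common.ProcessRabbitMQEvents(processor); err != nil {
		common.LogError(err)
	}
}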
common/rabbitmq_obs.go (new file, 115 lines)
@@ -0,0 +1,115 @@
package common

import (
"encoding/json"
"errors"
"fmt"
"slices"
"strings"
)

const (
ObsMessageType_PackageBuildFail = "package.build_fail"
ObsMessageType_PackageBuildSuccess = "package.build_success"
ObsMessageType_PackageBuildUnchanged = "package.build_unchanged"

ObsMessageType_RepoBuildFinished = "repo.build_finished"
ObsMessageType_RepoBuildStarted = "repo.build_started"
)

type BuildResultMsg struct {
Status string
Project string `json:"project"`
Package string `json:"package"`
Repo string `json:"repository"`
Arch string `json:"arch"`

StartTime int32 `json:"starttime"`
EndTime int32 `json:"endtime"`
WorkerID string `json:"workerid"`
Version string `json:"versrel"`
Build string `json:"buildtype"`
}

type RepoBuildMsg struct {
Status string
Project string `json:"project"`
Repo string `json:"repo"`
Arch string `json:"arch"`
BuildId string `json:"buildid"`
}

var ObsRabbitMessageError_UnknownMessageType error = errors.New("Unknown message type")
var ObsRabbitMessageError_ParseError error = errors.New("JSON parsing error")

func ParseObsRabbitMessaege(ObsMessageType string, data []byte) (interface{}, error) {
unmarshall := func(data []byte, v any) (interface{}, error) {
if err := json.Unmarshal(data, v); err != nil {
return nil, fmt.Errorf("%w: %s", ObsRabbitMessageError_ParseError, err)
}
return v, nil
}

switch ObsMessageType {
case ObsMessageType_PackageBuildSuccess, ObsMessageType_PackageBuildUnchanged:
ret := &BuildResultMsg{Status: "succeeded"}
return unmarshall(data, ret)
case ObsMessageType_PackageBuildFail:
ret := &BuildResultMsg{Status: "failed"}
return unmarshall(data, ret)
case ObsMessageType_RepoBuildFinished:
ret := &RepoBuildMsg{Status: "finished"}
return unmarshall(data, ret)
case ObsMessageType_RepoBuildStarted:
ret := &RepoBuildMsg{Status: "building"}
return unmarshall(data, ret)
}

return nil, fmt.Errorf("%w: %s", ObsRabbitMessageError_UnknownMessageType, ObsMessageType)
}

type ObsMessageProcessor func(topic string, data []byte) error

type RabbitMQObsBuildStatusProcessor struct {
Handlers map[string]ObsMessageProcessor

c *RabbitConnection
}

func (o *RabbitMQObsBuildStatusProcessor) routingKeyPrefix() string {
if strings.HasSuffix(o.c.RabbitURL.Hostname(), "opensuse.org") {
return "opensuse"
}

return "suse"
}

func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
prefix := o.routingKeyPrefix()
msgs := make([]string, len(o.Handlers))
idx := 0
for k, _ := range o.Handlers {
msgs[idx] = prefix + ".obs." + k
idx++
}
slices.Sort(msgs)
return msgs
}

func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}

return o.c
}

func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
prefix := o.routingKeyPrefix() + ".obs."
topic := strings.TrimPrefix(msg.RoutingKey, prefix)
if h, ok := o.Handlers[topic]; ok {
return h(topic, msg.Body)
}

return fmt.Errorf("Unhandled message received: %s", msg.RoutingKey)
}
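Similarly, a hedged sketch of an OBS build-status consumer on top of the new RabbitMQObsBuildStatusProcessor (the handler body and queue URL are illustrative):

package main

import (
	"net/url"

	"src.opensuse.org/autogits/common"
)

func main() {
	onBuildResult := func(topic string, data []byte) error {
		// ParseObsRabbitMessaege (spelling as in the source) returns *BuildResultMsg for package.* topics.
		msg, err := common.ParseObsRabbitMessaege(topic, data)
		if err != nil {
			return err
		}
		if build, ok := msg.(*common.BuildResultMsg); ok {
			common.LogInfo(build.Project, build.Package, build.Arch, build.Status)
		}
		return nil
	}

	processor := &common.RabbitMQObsBuildStatusProcessor{
		Handlers: map[string]common.ObsMessageProcessor{
			common.ObsMessageType_PackageBuildSuccess: onBuildResult,
			common.ObsMessageType_PackageBuildFail:    onBuildResult,
		},
	}

	u, err := url.Parse("amqps://rabbit.opensuse.org/example-obs-queue")
	if err != nil {
		common.LogError(err)
		return
	}
	processor.Connection().RabbitURL = u

	if err := common.ProcessRabbitMQEvents(processor); err != nil {
		common.LogError(err)
	}
}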
@@ -50,11 +50,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
}

slices.Sort(test.topicDelta)
@@ -64,11 +66,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}

changes := []string{}
l.UpdateTopics()
l.c.UpdateTopics(l)
a:
for {
select {
case c := <-l.topicSubChanges:
case c := <-l.c.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -78,13 +80,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {

l.Orgs = test.orgs2

l.UpdateTopics()
l.c.UpdateTopics(l)
changes = []string{}

b:
for {
select {
case c := <-l.topicSubChanges:
case c := <-l.c.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)
@@ -25,26 +25,39 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, r
return nil, err
}

reviews := make([]*models.PullReview, 0, 10)
reviews := make([]*models.PullReview, 0, len(reviewers))
var comments []*models.TimelineComment

alreadyHaveUserReview := func(user string) bool {
for _, r := range reviews {
if r.User != nil && r.User.UserName == user {
return true
}
}
return false
}

for idx, item := range timeline {
if item.Type == TimelineCommentType_Review {
for _, r := range rawReviews {
if r.ID == item.ReviewID {
reviews = append(reviews, r)
if !alreadyHaveUserReview(r.User.UserName) {
reviews = append(reviews, r)
}
break
}
}
} else if item.Type == TimelineCommentType_Comment {
comments = append(comments, item)
} else if item.Type == TimelineCommentType_PushPull {
LogDebug("cut-off", item.Created)
timeline = timeline[0:idx]
break
} else {
LogDebug("Unhandled timeline type:", item.Type)
}
}
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))

return &PRReviews{
reviews: reviews,
@@ -72,6 +85,7 @@ func (r *PRReviews) IsManualMergeOK() bool {
if c.Updated != c.Created {
continue
}
LogDebug("comment:", c.User.UserName, c.Body)
if slices.Contains(r.reviewers, c.User.UserName) {
if bodyCommandManualMergeOK(c.Body) {
return true
@@ -113,6 +113,10 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}

func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}

func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {
@@ -173,4 +173,3 @@ func (d DevelProjects) GetDevelProject(pkg string) (string, error) {

return "", DevelProjectNotFound
}
@@ -73,6 +73,10 @@ func runObsCommand(args ...string) ([]string, error) {

var DebugMode bool

func giteaPackage(pkg string) string {
return strings.ReplaceAll(pkg, "+", "_")
}

func projectMaintainer(obs *common.ObsClient, prj string) ([]string, []string) { // users, groups
meta, err := obs.GetProjectMeta(prj)
if err != nil {
@@ -186,13 +190,16 @@ func cloneDevel(git common.Git, gitDir, outName, urlString string) error {
}

func importRepos(packages []string) {
RepoToObsName := make(map[string]string)

factoryRepos := make([]*models.Repository, 0, len(packages)*2)
develProjectPackages := make([]string, 0, len(packages))
for _, pkg := range packages {
src_pkg_name := strings.Split(pkg, ":")
RepoToObsName[giteaPackage(src_pkg_name[0])] = src_pkg_name[0]
repo, err := client.Repository.RepoGet(
repository.NewRepoGetParams().
WithDefaults().WithOwner("pool").WithRepo(src_pkg_name[0]),
WithDefaults().WithOwner("pool").WithRepo(giteaPackage(src_pkg_name[0])),
r.DefaultAuthentication)

if err != nil {
@@ -219,7 +226,7 @@ func importRepos(packages []string) {

oldPackageNames := make([]string, 0, len(factoryRepos))
for _, repo := range factoryRepos {
oldPackageNames = append(oldPackageNames, repo.Name)
oldPackageNames = append(oldPackageNames, RepoToObsName[repo.Name])
}

// fork packags from pool
@@ -241,44 +248,60 @@ func importRepos(packages []string) {
log.Println("adding remotes...")
for i := 0; i < len(factoryRepos); i++ {
pkg := factoryRepos[i]
pkgName := RepoToObsName[pkg.Name]
gitName := pkg.Name

// verify that package was created by `git-importer`, or it's scmsync package and clone it
fi, err := os.Stat(filepath.Join(git.GetPath(), pkg.Name))
fi, err := os.Stat(filepath.Join(git.GetPath(), gitName))
if os.IsNotExist(err) {
if slices.Contains(develProjectPackages, pkg.Name) {
if slices.Contains(develProjectPackages, pkgName) {
// failed import of former factory package
log.Println("Failed to import former factory pkg:", pkgName)
continue
}

// scmsync?
devel_project, err := devel_projects.GetDevelProject(pkg.Name)
devel_project, err := devel_projects.GetDevelProject(pkgName)
if err != nil {
log.Panicln("devel project not found for", pkg.Name, "err:", err)
log.Panicln("devel project not found for", RepoToObsName[pkg.Name], "err:", err)
}
meta, _ := obs.GetPackageMeta(devel_project, pkg.Name)
meta, _ := obs.GetPackageMeta(devel_project, pkgName)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg.Name, meta.ScmSync); err != nil {
if err2 := cloneDevel(git, "", gitName, meta.ScmSync); err != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main")
if err2 := git.GitExec(gitName, "checkout", "-B", "main"); err2 != nil {
git.GitExecOrPanic(gitName, "checkout", "-B", "master")
}
continue
}

// try again, should now exist
if fi, err = os.Stat(filepath.Join(git.GetPath(), pkg.Name)); err != nil {
if fi, err = os.Stat(filepath.Join(git.GetPath(), gitName)); err != nil {
log.Panicln(err)
}
} else if err != nil {
log.Panicln(err)
} else {
// verify that we do not have scmsync for imported packages
meta, err := obs.GetPackageMeta(prj, pkg.Name)
meta, err := obs.GetPackageMeta(prj, pkgName)
if err != nil {
log.Panicln(err)
}

if len(meta.ScmSync) > 0 {
log.Panicln("importing an scmsync package??:", prj, pkg.Name)
u, err := url.Parse(meta.ScmSync)
if err != nil {
log.Println("Invlid scmsync in", pkg, meta.ScmSync, err)
}
o, err := url.Parse(strings.TrimSpace(git.GitExecWithOutputOrPanic(gitName, "remote", "get-url", "origin")))
log.Println("Invlid scmsync in git repo", pkg, meta.ScmSync, err)
if u.Host != o.Host || u.Path != u.Path {
log.Panicln("importing an scmsync package??:", prj, gitName)
} else {
log.Println("previous SCMSYNC package. Pull.")
git.GitExecOrPanic(gitName, "pull", "origin", "HEAD:main")
}
}
}

@@ -287,11 +310,11 @@ func importRepos(packages []string) {
}

// add remote repos
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show", "-n")
out := git.GitExecWithOutputOrPanic(gitName, "remote", "show", "-n")
switch pkg.Owner.UserName {
case "pool":
if !slices.Contains(strings.Split(out, "\n"), "pool") {
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "add", "pool", pkg.CloneURL)
out := git.GitExecWithOutputOrPanic(gitName, "remote", "add", "pool", pkg.CloneURL)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
@@ -398,12 +421,22 @@ func importRepos(packages []string) {

for i := 0; i < len(develProjectPackages); i++ {
pkg := develProjectPackages[i]
meta, _ := obs.GetPackageMeta(prj, pkg)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Println("Error fetching pkg meta for:", prj, pkg, err)
}
}
if meta == nil {
log.Println(" **** pkg meta is nil? ****")
} else if len(meta.ScmSync) > 0 {
if _, err := os.Stat(path.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
continue
} else {
common.PanicOnError(gitImporter(prj, pkg))
@@ -465,7 +498,7 @@ func importRepos(packages []string) {
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg.Name, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkg.Name, "fetch", "devel")
// git.GitExecOrPanic(pkgName, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg.Name, "lfs", "fetch", "--all")
@@ -473,8 +506,8 @@ func importRepos(packages []string) {
}
git.GitExecOrPanic(pkg.Name, "push", "develorigin", "main", "-f")
git.GitExec(pkg.Name, "push", "develorigin", "--delete", "factory", "devel")
// git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(&models.EditRepoOption{
// git.GitExecOrPanic(pkg.ame, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(repo.Name)).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
HasPullRequests: true,
@@ -499,12 +532,13 @@ func importRepos(packages []string) {

for _, pkg := range develProjectPackages {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
_, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &pkg,
Name: &giteaPkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
@@ -514,7 +548,7 @@ func importRepos(packages []string) {
log.Panicln("Error creating new package repository:", pkg, err)
}

ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPkg).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
@@ -554,7 +588,7 @@ func importRepos(packages []string) {
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
git.GitExec(pkg, "push", "develorigin", "--delete", "factory", "devel")

_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(&models.EditRepoOption{
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
}), r.DefaultAuthentication)
@@ -653,7 +687,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
missing := []string{}
uids := make([]common.PersonRepoMeta, len(orig_uids))
copy(uids, orig_uids)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
if err != nil {
if errors.Is(err, &repository.RepoListCollaboratorsNotFound{}) {
return missing
@@ -674,7 +708,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
log.Println("missing collabs for", pkg, ":", uids)
}
for _, u := range uids {
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(pkg).WithBody(&models.AddCollaboratorOption{
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.AddCollaboratorOption{
Permission: "write",
}).WithCollaborator(u.UserID), r.DefaultAuthentication)

@@ -809,14 +843,14 @@ func createPrjGit() {
if err != nil {
log.Panicln(err)
}
file.WriteString("{\n // Reference build project\n \"ObsProject\": \""+prj+"\",\n}\n")
file.WriteString("{\n // Reference build project\n \"ObsProject\": \"" + prj + "\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "staging.config")

if file, err = os.Create(path.Join(git.GetPath(), common.DefaultGitPrj, "workflow.config")); err != nil {
log.Panicln(err)
}
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \""+org+"\",\n}\n")
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \"" + org + "\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "workflow.config")
}
@@ -857,6 +891,7 @@ func main() {
syncMaintainers := flags.Bool("sync-maintainers-only", false, "Sync maintainers to Gitea and exit")
flags.BoolVar(&forceBadPool, "bad-pool", false, "Force packages if pool has no branches due to bad import")
flags.BoolVar(&forceNonPoolPackages, "non-pool", false, "Allow packages that are not in pool to be created. WARNING: Can't add to factory later!")
specificPackage := flags.String("package", "", "Process specific package only, ignoring the others")

if help := flags.Parse(os.Args[1:]); help == flag.ErrHelp || flags.NArg() != 2 {
printHelp(helpString.String())
@@ -953,11 +988,15 @@ func main() {
if *purgeOnly {
log.Println("Purging repositories...")
for _, pkg := range packages {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
}
os.Exit(10)
}

if len(*specificPackage) != 0 {
importRepos([]string{*specificPackage})
return
}
importRepos(packages)
syncMaintainersToGitea(packages)
}
gitea_status_proxy/config.go (new file, 46 lines)
@@ -0,0 +1,46 @@
package main

import (
"encoding/json"
"fmt"
"io"
"os"

"github.com/tailscale/hujson"
)

type Config struct {
ForgeEndpoint string `json:"forge_url"`
Keys []string `json:"keys"`
}

type contextKey string

const configKey contextKey = "config"

func ReadConfig(reader io.Reader) (*Config, error) {
data, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("error reading config data: %w", err)
}
config := Config{}
data, err = hujson.Standardize(data)
if err != nil {
return nil, fmt.Errorf("failed to parse json: %w", err)
}
if err := json.Unmarshal(data, &config); err != nil {
return nil, fmt.Errorf("error parsing json to api keys and target url: %w", err)
}

return &config, nil
}

func ReadConfigFile(filename string) (*Config, error) {
file, err := os.Open(filename)
if err != nil {
return nil, fmt.Errorf("cannot open config file for reading. err: %w", err)
}
defer file.Close()

return ReadConfig(file)
}
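A hedged example of a proxy config in hujson form and of parsing it with ReadConfig (the test name, URL, and tokens are placeholders; this would sit next to config.go in the same package):

package main

import (
	"strings"
	"testing"
)

func TestReadConfigExample(t *testing.T) {
	// hujson allows comments and trailing commas in the config file.
	cfg := `{
		// Forge API base URL that status requests are forwarded to.
		"forge_url": "https://src.opensuse.org/api/v1",
		// Tokens accepted by the proxy in "Authorization: Bearer <token>".
		"keys": ["example-token-1", "example-token-2"],
	}`

	config, err := ReadConfig(strings.NewReader(cfg))
	if err != nil {
		t.Fatal(err)
	}
	if config.ForgeEndpoint != "https://src.opensuse.org/api/v1" || len(config.Keys) != 2 {
		t.Error("unexpected config:", config)
	}
}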
gitea_status_proxy/handlers.go (new file, 15 lines)
@@ -0,0 +1,15 @@
package main

import (
"context"
"net/http"
)

func ConfigMiddleWare(cfg *Config) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), configKey, cfg)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
}
169
gitea_status_proxy/main.go
Normal file
169
gitea_status_proxy/main.go
Normal file
@@ -0,0 +1,169 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type Status struct {
|
||||
Context string `json:"context"`
|
||||
State string `json:"state"`
|
||||
TargetUrl string `json:"target_url"`
|
||||
}
|
||||
|
||||
type StatusInput struct {
|
||||
State string `json:"state"`
|
||||
TargetUrl string `json:"target_url"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
configFile := flag.String("config", "", "status proxy config file")
|
||||
flag.Parse()
|
||||
|
||||
if *configFile == "" {
|
||||
common.LogError("missing required argument config")
|
||||
return
|
||||
}
|
||||
|
||||
config, err := ReadConfigFile(*configFile)
|
||||
|
||||
if err != nil {
|
||||
common.LogError("Failed to read config file", err)
|
||||
return
|
||||
}
|
||||
|
||||
mux := http.NewServeMux()
|
||||
|
||||
mux.Handle("/repos/{owner}/{repo}/statuses/{sha}", ConfigMiddleWare(config)(http.HandlerFunc(StatusProxy)))
|
||||
|
||||
common.LogInfo("server up and listening on :3000")
|
||||
err = http.ListenAndServe(":3000", mux)
|
||||
|
||||
if err != nil {
|
||||
common.LogError("Server failed to start up", err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func StatusProxy(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method == http.MethodPost {
|
||||
config, ok := r.Context().Value(configKey).(*Config)
|
||||
|
||||
if !ok {
|
||||
common.LogError("Config missing from context")
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
header := r.Header.Get("Authorization")
|
||||
if header == "" {
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
token_arr := strings.Split(header, " ")
|
||||
if len(token_arr) != 2 {
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
if !strings.EqualFold(token_arr[0], "Bearer") {
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
token := token_arr[1]
|
||||
|
||||
if !slices.Contains(config.Keys, token) {
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
owner := r.PathValue("owner")
|
||||
repo := r.PathValue("repo")
|
||||
sha := r.PathValue("sha")
|
||||
|
||||
if !ok {
|
||||
common.LogError("Failed to get config from context, is it set?")
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
}
|
||||
|
||||
posturl := fmt.Sprintf("%s/repos/%s/%s/statuses/%s", config.ForgeEndpoint, owner, repo, sha)
|
||||
decoder := json.NewDecoder(r.Body)
|
||||
var statusinput StatusInput
|
||||
err := decoder.Decode(&statusinput)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
status := Status{
|
||||
Context: "Build in obs",
|
||||
State: statusinput.State,
|
||||
TargetUrl: statusinput.TargetUrl,
|
||||
}
|
||||
|
||||
status_payload, err := json.Marshal(status)
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
client := &http.Client{}
|
||||
req, err := http.NewRequest("POST", posturl, bytes.NewBuffer(status_payload))
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ForgeToken := os.Getenv("GITEA_TOKEN")
|
||||
|
||||
if ForgeToken == "" {
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
common.LogError("GITEA_TOKEN was not set, all requests will fail")
|
||||
return
|
||||
}
|
||||
|
||||
req.Header.Add("Content-Type", "Content-Type")
|
||||
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
|
||||
|
||||
resp, err := client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
common.LogError(fmt.Sprintf("Request to forge endpoint failed: %v", err))
|
||||
http.Error(w, http.StatusText(http.StatusBadGateway), http.StatusBadGateway)
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(resp.StatusCode)
|
||||
|
||||
/*
|
||||
the commented out section sets every key
|
||||
value from the headers, unsure if this
|
||||
leaks information from gitea
|
||||
|
||||
for k, v := range resp.Header {
|
||||
for _, vv := range v {
|
||||
w.Header().Add(k, vv)
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
_, err = io.Copy(w, resp.Body)
|
||||
if err != nil {
|
||||
common.LogError("Error copying response body: %v", err)
|
||||
}
|
||||
} else {
|
||||
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
}
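For context, a minimal sketch of how a build service could report a status through this proxy, assuming the proxy listens on localhost:3000 as set up in main() above and that "proxy-key" is one of the entries in the config's Keys; the owner, repo, sha, and target URL are placeholders.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// JSON body matching the StatusInput shape the proxy decodes.
	body := []byte(`{"state": "success", "target_url": "https://build.example.org/job/42"}`)

	req, err := http.NewRequest("POST",
		"http://localhost:3000/repos/pool/somepkg/statuses/0123456789abcdef", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer proxy-key")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("proxy replied with", resp.Status)
}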
@@ -11,6 +11,7 @@ import (
	"strconv"
	"strings"
	"time"
	"unicode"

	"src.opensuse.org/autogits/common"
	"src.opensuse.org/autogits/common/gitea-generated/models"
@@ -21,9 +22,10 @@ var acceptRx *regexp.Regexp
var rejectRx *regexp.Regexp
var groupName string

func InitRegex(groupName string) {
	acceptRx = regexp.MustCompile("\\s*:\\s*LGTM")
	rejectRx = regexp.MustCompile("\\s*:\\s*")
func InitRegex(newGroupName string) {
	groupName = newGroupName
	acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
	rejectRx = regexp.MustCompile("^:\\s*")
}

func ParseReviewLine(reviewText string) (bool, string) {
@@ -34,7 +36,18 @@ func ParseReviewLine(reviewText string) (bool, string) {
		return false, line
	}

	return true, line[glen:]
	l := line[glen:]
	for idx, r := range l {
		if unicode.IsSpace(r) {
			continue
		} else if r == ':' {
			return true, l[idx:]
		} else {
			return false, line
		}
	}

	return false, line
}

func ReviewAccepted(reviewText string) bool {
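To make the regex change above concrete, here is a small, self-contained snippet that applies the new accept/reject patterns to the remainder ParseReviewLine hands back once the group mention is stripped. The sample inputs are invented, and how ReviewAccepted/ReviewRejected combine the two matches is defined elsewhere in this file.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same patterns as the new InitRegex above.
	acceptRx := regexp.MustCompile("^:\\s*(LGTM|approved?)")
	rejectRx := regexp.MustCompile("^:\\s*")

	for _, rest := range []string{": LGTM", ": approved", ": approve", ": disapprove", "no leading colon"} {
		fmt.Printf("%q -> accept match: %v, reject match: %v\n",
			rest, acceptRx.MatchString(rest), rejectRx.MatchString(rest))
	}
}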
@@ -2,6 +2,76 @@ package main

import "testing"

func TestReviews(t *testing.T) {
func TestReviewApprovalCheck(t *testing.T) {
	tests := []struct {
		Name string
		GroupName string
		InString string
		Approved bool
		Rejected bool
	}{
		{
			Name: "Empty String",
			GroupName: "group",
			InString: "",
		},
		{
			Name: "Random Text",
			GroupName: "group",
			InString: "some things LGTM",
		},
		{
			Name: "Group name with Random Text means disapproval",
			GroupName: "group",
			InString: "@group: some things LGTM",
			Rejected: true,
		},
		{
			Name: "Bad name with Approval",
			GroupName: "group2",
			InString: "@group: LGTM",
		},
		{
			Name: "Bad name with Approval",
			GroupName: "group2",
			InString: "@group: LGTM",
		},
		{
			Name: "LGTM approval",
			GroupName: "group2",
			InString: "@group2: LGTM",
			Approved: true,
		},
		{
			Name: "approval",
			GroupName: "group2",
			InString: "@group2: approved",
			Approved: true,
		},
		{
			Name: "approval",
			GroupName: "group2",
			InString: "@group2: approve",
			Approved: true,
		},
		{
			Name: "disapproval",
			GroupName: "group2",
			InString: "@group2: disapprove",
			Rejected: true,
		},
	}

	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			InitRegex(test.GroupName)

			if r := ReviewAccepted(test.InString); r != test.Approved {
				t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
			}
			if r := ReviewRejected(test.InString); r != test.Rejected {
				t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
			}
		})
	}
}

@@ -315,7 +315,9 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
	}
	meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
	meta.Title = fmt.Sprintf("PR#%d to %s", pr.Index, pr.Base.Name)
	meta.PublicFlags = common.Flags{Contents: "<disable/>"}
	// QE wants it published ... also we should not hardcode it here, since
	// it is configurable via the :PullRequest project
	// meta.PublicFlags = common.Flags{Contents: "<disable/>"}

	meta.Groups = nil
	meta.Persons = nil
@@ -459,7 +461,7 @@ func FetchOurLatestActionableReview(gitea common.Gitea, org, repo string, id int

	for idx := len(reviews) - 1; idx >= 0; idx-- {
		review := reviews[idx]
		if review.User != nil || review.User.UserName == Username {
		if review.User == nil || review.User.UserName == Username {
			if IsDryRun {
				// for purposes of moving forward a no-op check
				return review, nil
@@ -547,7 +549,7 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
	}

	if pr.State != "closed" {
		common.LogInfo(" ignoring peding PR", thread.Subject.HTMLURL, " state:", pr.State)
		common.LogInfo(" ignoring pending PR", thread.Subject.HTMLURL, " state:", pr.State)
		return false
	}

@@ -22,6 +22,7 @@ import (
	"bytes"
	"flag"
	"fmt"
	"io"
	"log"
	"net/http"

@@ -78,7 +79,7 @@ func ProjectStatusSummarySvg(project string) []byte {
	return ret.Bytes()
}

func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
func PackageStatusSummarySvg(status *common.PackageBuildStatus) []byte {
	buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
	if !ok {
		buildStatus = common.ObsBuildStatusDetails["error"]
@@ -108,8 +109,10 @@ func main() {
	key := flag.String("key-file", "", "Private key for the TLS certificate")
	listen := flag.String("listen", "[::1]:8080", "Listening string")
	disableTls := flag.Bool("no-tls", false, "Disable TLS")
	obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
	obsHost := flag.String("obs-host", "https://api.opensuse.org", "OBS API endpoint for package status information")
	flag.BoolVar(&debug, "debug", false, "Enable debug logging")
	RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
	Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
	flag.Parse()

	common.PanicOnError(common.RequireObsSecretToken())
@@ -143,21 +146,25 @@ func main() {

		res.Header().Add("content-type", "image/svg+xml")

		prjStatus := GetCurrentStatus(prj)
		if prjStatus == nil {
		status := GetDetailedBuildStatus(prj, pkg, repo, arch)
		res.Write(PackageStatusSummarySvg(status))
	})
	http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}/buildlog", func(res http.ResponseWriter, req *http.Request) {
		prj := req.PathValue("Project")
		pkg := req.PathValue("Package")
		repo := req.PathValue("Repository")
		arch := req.PathValue("Arch")

		// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
		data, err := obs.BuildLog(prj, pkg, repo, arch)
		if err != nil {
			res.WriteHeader(http.StatusInternalServerError)
			common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
			return
		}
		defer data.Close()

		for _, r := range prjStatus.Result {
			if r.Arch == arch && r.Repository == repo {
				for _, status := range r.Status {
					if status.Package == pkg {
						res.Write(PackageStatusSummarySvg(status))
						return
					}
				}
			}
		}
		io.Copy(res, data)
	})

	go ProcessUpdates()

3 obs-status-service/rabbit.go Normal file
@@ -0,0 +1,3 @@
package main

@@ -1,8 +1,8 @@
package main

import (
	"log"
	"slices"
	"strings"
	"sync"
	"time"

@@ -24,14 +24,86 @@ type StatusUpdateMsg struct {

func GetCurrentStatus(project string) *common.BuildResultList {
	statusMutex.RLock()
	defer statusMutex.RUnlock()

	if ret, found := CurrentStatus[project]; found {
		statusMutex.RUnlock()
		return ret
	} else {
		go WatchObsProject(obs, project)
	}

	res, err := obs.BuildStatus(project)
	statusMutex.RUnlock()
	statusMutex.Lock()
	defer statusMutex.Unlock()

	if err != nil {
		return res
	}
	CurrentStatus[project] = res

	now := time.Now().Unix()
	CurrentStatus[project].LastUpdate = now
	for _, r := range res.Result {
		r.LastUpdate = now
		for _, p := range r.Status {
			p.LastUpdate = now
		}
		slices.SortFunc(r.Status, packageSort)
	}
	slices.SortFunc(res.Result, repoSort)
	return res
}

func updatePrjPackage(prjState *common.BuildResultList, pkg string, now int64, pkgState *common.BuildResultList) {
	for prjState.
		Result[0].Status[0].Package
}

func extractPackageBuildStatus(prjState *common.BuildResultList, pkg string) []*common.PackageBuildStatus {

}
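Both helpers above are still empty stubs in this commit. Purely as a sketch of where this seems to be heading, and assuming BuildResultList.Result entries carry a Status slice of common.PackageBuildStatus values (as the lookup code further down uses), extractPackageBuildStatus could collect every per-repository entry for one package. The sketch is written as if it sat in this file; it is not code from this repository.

// Hypothetical completion, not part of this commit.
func extractPackageBuildStatusSketch(prjState *common.BuildResultList, pkg string) []*common.PackageBuildStatus {
	var out []*common.PackageBuildStatus
	if prjState == nil {
		return out
	}
	for _, r := range prjState.Result {
		// Collect the package's status from every repository/arch combination.
		for i := range r.Status {
			if r.Status[i].Package == pkg {
				out = append(out, &r.Status[i])
			}
		}
	}
	return out
}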

func GetDetailedPackageBuildStatus(prj, pkg string) []*common.PackageBuildStatus {
	statusMutex.RLock()
	now := time.Now().Unix()

	cachedPrj, found := CurrentStatus[prj]
	if found {
		statusMutex.Unlock()
		if now-cachedPrj.LastUpdate < 60 {
			return extractPackageBuildStatus(cachedPrj, pkg)
		}
	}

	ret, err := obs.BuildStatus(prj, pkg)
	if err != nil {
		return nil
	}

	statusMutex.Lock()
	defer statusMutex.Unlock()

	updatePrjPackage(cachedPrj, pkg, now, ret)
	return extractPackageBuildStatus(cachedPrj, pkg)
}

func GetDetailedBuildStatus(prj, pkg, repo, arch string) *common.PackageBuildStatus {
	prjStatus := GetCurrentStatus(prj)
	if prjStatus == nil {
		return nil
	}

	for _, r := range prjStatus.Result {
		if r.Arch == arch && r.Repository == repo {
			for _, status := range r.Status {
				if status.Package == pkg {
					return &status
				}
			}
		}
	}

	return nil
}

func ProcessUpdates() {
@@ -53,30 +125,3 @@ func ProcessUpdates() {
			}
		}
	}
}

func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
	old_state := ""

	mutex.Lock()
	if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
		mutex.Unlock()
		return
	} else {
		WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
		mutex.Unlock()
	}

	LogDebug("+ watching", ObsProject)
	opts := common.BuildResultOptions{}
	for {
		state, err := obs.BuildStatusWithState(ObsProject, &opts)
		if err != nil {
			log.Println(" *** Error fetching build for", ObsProject, err)
			time.Sleep(time.Minute)
		} else {
			opts.OldState = state.State
			LogDebug(" --> update", ObsProject, " => ", old_state)
			StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
		}
	}
}
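ProcessUpdates' body falls mostly outside this hunk. Purely as an assumption about how the pieces fit together, a consumer of StatusUpdateCh might refresh the cache roughly like this, taking Result to be the *common.BuildResultList stored in CurrentStatus; this is not code from the repository.

// Hypothetical consumer loop, written as if it lived in this file.
func processUpdatesSketch() {
	for msg := range StatusUpdateCh {
		statusMutex.Lock()
		// Replace the cached project state with the freshly fetched one.
		CurrentStatus[msg.ObsProject] = msg.Result
		statusMutex.Unlock()
	}
}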

@@ -526,7 +526,7 @@ func main() {
		log.Fatal(err)
	}

	var defs common.ListenDefinitions
	defs := &common.RabbitMQGiteaEventsProcessor{}
	var err error

	if len(*basePath) == 0 {
@@ -557,7 +557,7 @@ func main() {
			}
			log.Println("*** Reconfiguring ***")
			updateConfiguration(*configFilename, &defs.Orgs)
			defs.UpdateTopics()
			defs.Connection().UpdateTopics(defs)
		}
	}()
	signal.Notify(signalChannel, syscall.SIGHUP)
@@ -573,18 +573,17 @@ func main() {

	updateConfiguration(*configFilename, &defs.Orgs)

	defs.GitAuthor = GitAuthor
	defs.RabbitURL, err = url.Parse(*rabbitUrl)
	defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
	if err != nil {
		log.Panicf("cannot parse server URL. Err: %#v\n", err)
	}

	go consistencyCheckProcess()
	log.Println("defs:", defs)
	log.Println("defs:", *defs)

	defs.Handlers = make(map[string]common.RequestProcessor)
	defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
	defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}

	log.Fatal(defs.ProcessRabbitMQEvents())
	log.Fatal(common.ProcessRabbitMQEvents(defs))
}

@@ -162,9 +162,9 @@ func main() {
	checker := CreateDefaultStateChecker(*checkOnStart, req, Gitea, time.Duration(*checkIntervalHours)*time.Hour)
	go checker.ConsistencyCheckProcess()

	listenDefs := common.ListenDefinitions{
	listenDefs := &common.RabbitMQGiteaEventsProcessor{
		Orgs: orgs,
		GitAuthor: GitAuthor,
		// GitAuthor: GitAuthor,
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: req,
			common.RequestType_PRSync: req,
@@ -172,7 +172,7 @@ func main() {
			common.RequestType_PRReviewRejected: req,
		},
	}
	listenDefs.RabbitURL, _ = url.Parse(*rabbitUrl)
	listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)

	common.PanicOnError(listenDefs.ProcessRabbitMQEvents())
	common.PanicOnError(common.ProcessRabbitMQEvents(listenDefs))
}

@@ -6,6 +6,7 @@ import (
	"fmt"
	"path"
	"runtime/debug"
	"slices"
	"strings"

	"github.com/opentracing/opentracing-go/log"
@@ -17,6 +18,33 @@ func prGitBranchNameForPR(repo string, prNo int) string {
	return fmt.Sprintf("PR_%s#%d", repo, prNo)
}

func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
	title_refs := make([]string, 0, len(prset.PRs)-1)
	refs := make([]string, 0, len(prset.PRs)-1)

	for _, pr := range prset.PRs {
		org, repo, idx := pr.PRComponents()

		title_refs = append(title_refs, repo)
		ref := fmt.Sprintf(common.PrPattern, org, repo, idx)
		refs = append(refs, ref)
	}

	title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
	desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ",\n")

	if prset.Config.ManualMergeOnly {
		desc = desc + "\n\nManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
	}
	if prset.Config.ManualMergeProject {
		desc = desc + "\nManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
	}
	if !prset.Config.ManualMergeOnly && !prset.Config.ManualMergeProject {
		desc = desc + "\nAutomatic merge enabled. This will merge when all review requirements are satisfied."
	}
	return
}
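As a quick illustration of the strings PrjGitDescription assembles, the snippet below reproduces just the joining logic with made-up, pre-formatted references; the real refs come from fmt.Sprintf(common.PrPattern, org, repo, idx), whose exact format is defined in the common package and is not assumed here.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Placeholder data; in the bot these come from the PRs in the prset.
	titleRefs := []string{"pkgA", "pkgB"}
	refs := []string{"products/pkgA#12", "products/pkgB#34"} // illustrative formatting only
	author := "autogits-bot"                                 // stands in for GitAuthor

	title := "Forwarded PRs: " + strings.Join(titleRefs, ", ")
	desc := fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", author) +
		strings.Join(refs, ",\n")

	fmt.Println(title)
	fmt.Println(desc)
}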

func verifyRepositoryConfiguration(repo *models.Repository) error {
	if repo.AutodetectManualMerge && repo.AllowManualMerge {
		return nil
@@ -80,46 +108,42 @@ func AllocatePRProcessor(req *common.PullRequestWebhookEvent, configs common.Aut
	}
	common.LogDebug("git path:", git.GetPath())

	// git.GitExecOrPanic("", "config", "set", "--global", "advice.submoduleMergeConflict", "false")
	// git.GitExecOrPanic("", "config", "set", "--global", "advice.mergeConflict", "false")

	return &PRProcessor{
		config: config,
		git: git,
	}, nil
}

func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string, []string, error) {
	prjGitPR, err := prset.GetPrjGitPR()
	if err != nil {
		return nil, nil, err
	}

func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
	git := pr.git
	subList, err := git.GitSubmoduleList(common.DefaultGitPrj, "HEAD")
	if err != nil {
		common.LogError("Error fetching submodule list for PrjGit", err)
		return nil, nil, err
		return err
	}

	refs := make([]string, 0, len(prset.PRs))
	title_refs := make([]string, 0, len(prset.PRs))
	for _, pr := range prset.PRs {
		if prset.IsPrjGitPR(pr.PR) {
			continue
		}

		org := pr.PR.Base.Repo.Owner.UserName
		repo := pr.PR.Base.Repo.Name
		idx := pr.PR.Index
		org, repo, idx := pr.PRComponents()
		prHead := pr.PR.Head.Sha
		revert := false

		if pr.PR.State != "open" {
			// remove PR from PrjGit
			var valid bool
			if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
				common.LogError("Failed fetching original submodule commit id for repo")
				return nil, nil, err
			prjGitPR, err := prset.GetPrjGitPR()
			if prjGitPR != nil {
				// remove PR from PrjGit
				var valid bool
				if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
					common.LogError("Failed fetching original submodule commit id for repo")
					return err
				}
			}

			revert = true
		}

@@ -134,9 +158,6 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string,

		if revert {
			commitMsg = fmt.Sprintln("auto-created for", repo, "\n\nThis commit was autocreated by", GitAuthor, "removing\n", ref)
		} else {
			refs = append(refs, ref)
			title_refs = append(title_refs, repo)
		}

		updateSubmoduleInPR(submodulePath, prHead, git)
@@ -155,7 +176,7 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string,
			common.LogError("Failed to find expected repo:", repo)
		}
	}
	return title_refs, refs, nil
	return nil
}

func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet) error {
@@ -166,17 +187,16 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
		common.LogError("Failed to fetch PrjGit repository data.", PrjGitOrg, PrjGitRepo, err)
		return err
	}
	remoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
	RemoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
	common.PanicOnError(err)
	git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, remoteName+"/"+PrjGitBranch)
	git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, RemoteName+"/"+PrjGitBranch)

	headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
	if err != nil {
		common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
		return err
	}
	title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
	if err != nil {
	if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
		return err
	}
	newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -186,11 +206,9 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
	}

	if !common.IsDryRun && headCommit != newHeadCommit {
		common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
		pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch,
			"Forwarded PRs: "+strings.Join(title_refs, ", "),
			fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor)+strings.Join(refs, ", "),
		)
		common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
		title, desc := PrjGitDescription(prset)
		pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
		if err != nil {
			common.LogError("Error creating PrjGit PR:", err)
			return err
@@ -199,12 +217,40 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
			RemoveDeadline: true,
		})

		prset.AddPR(pr)
		prinfo := prset.AddPR(pr)
		prinfo.RemoteName = RemoteName
	}
	return nil
}

func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch string) error {
	git := pr.git
	PrjGitPR, err := prset.GetPrjGitPR()
	common.PanicOnError(err)

	remoteBranch := PrjGitPR.RemoteName + "/" + branch

	common.LogDebug("Rebasing on top of", remoteBranch)
	for conflict := git.GitExec(common.DefaultGitPrj, "rebase", remoteBranch); conflict != nil; {
		statuses, err := git.GitStatus(common.DefaultGitPrj)
		if err != nil {
			git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
			common.PanicOnError(err)
		}
		for _, s := range statuses {
			if s.SubmoduleChanges != "S..." {
				git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
				return fmt.Errorf("Unexpected conflict in rebase. %s", s)
			}
		}
		conflict = git.GitExec(common.DefaultGitPrj, "rebase", "--skip")
	}

	return nil
}

func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
	_, _, PrjGitBranch := prset.Config.GetPrjGit()
	PrjGitPR, err := prset.GetPrjGitPR()
	if err != nil {
		common.LogError("Updating PrjGitPR but not found?", err)
@@ -215,16 +261,23 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
	PrjGit := PrjGitPR.PR.Base.Repo
	prjGitPRbranch := PrjGitPR.PR.Head.Name

	remoteName, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
	PrjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
	common.PanicOnError(err)
	git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitBranch)

	forcePush := false
	// trust Gitea here on mergeability
	if !PrjGitPR.PR.Mergeable {
		common.PanicOnError(pr.RebaseAndSkipSubmoduleCommits(prset, PrjGitBranch))
		forcePush = true
	}

	headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
	if err != nil {
		common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
		return err
	}
	title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
	if err != nil {
	if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
		return err
	}
	newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -234,12 +287,14 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
	}

	if !common.IsDryRun && headCommit != newHeadCommit {
		common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
		params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
		if forcePush {
			params = slices.Insert(params, 1, "-f")
		}
		common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))

		// update PR
		PrjGitTitle := "Forwarded PRs: " + strings.Join(title_refs, ", ")
		PrjGitBody := fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ", ")

		PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
		Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
			RemoveDeadline: true,
			Title: PrjGitTitle,
@@ -285,9 +340,7 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
		common.LogInfo("PR State is closed:", prjGitPR.PR.State)
		for _, pr := range prset.PRs {
			if pr.PR.State == "open" {
				org := pr.PR.Base.Repo.Owner.UserName
				repo := pr.PR.Base.Repo.Name
				idx := pr.PR.Index
				org, repo, idx := pr.PRComponents()
				Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
					State: "closed",
				})
@@ -296,6 +349,14 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
		return nil
	}

	if len(prset.PRs) > 1 {
		for _, pr := range prset.PRs {
			if prset.IsPrjGitPR(pr.PR) {
				continue
			}
		}
	}

	if err = pr.UpdatePrjGitPR(prset); err != nil {
		return err
	}
@@ -314,13 +375,10 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
	// make sure that prjgit is consistent and only submodules that are to be *updated*
	// reset anything that changed that is not part of the prset
	// package removals/additions are *not* counted here
	org, repo, branch := config.GetPrjGit()
	if pr, err := prset.GetPrjGitPR(); err == nil {
		remote, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, pr.PR.Base.Repo.CloneURL)
		common.PanicOnError(err)
		git.GitExecOrPanic(common.DefaultGitPrj, "fetch", remote, pr.PR.MergeBase, pr.PR.Head.Ref)

		common.LogDebug("Fetch done")
		orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.MergeBase)
		common.LogDebug("Submodule parse begin")
		orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.RemoteName+"/"+branch) // merge base must be the remote branch, checked in prjgit update
		common.PanicOnError(err)
		new_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.Head.Sha)
		common.PanicOnError(err)
@@ -357,7 +415,6 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
	}

	common.LogDebug(" num of reviewers:", len(prjGitPR.PR.RequestedReviewers))
	org, repo, branch := config.GetPrjGit()
	maintainers, err := common.FetchProjectMaintainershipData(Gitea, org, repo, branch)
	if err != nil {
		return err