10 Commits

Author SHA256 Message Date
ec5ac4fca3 Don't fail on project git pull request creation. 2025-07-11 10:53:38 +02:00
d7132727a7 Create Pull Requests to specified branches
instead of always using DefaultBranch. This means that the target branch
now always needs to be specified.
2025-07-11 10:52:00 +02:00
74f40f536a message typo 2025-07-11 09:40:36 +02:00
cde46e85f3 Enable code stream publishing 2025-07-11 09:40:36 +02:00
4378568953 Fix logic in crash protection
We must not access review.User object if it is nil
2025-07-11 09:40:36 +02:00
c286e12b67 Try to use Staging Master Project as default build target if available
This allows us to set custom build configuration or repository sets for
pull request projects.
2025-07-11 09:40:36 +02:00
cc9ad1703d Don't crash when new packages get added
The build result request for the base project fails in this
situation, since the requested package does not exist.

Therefore we need separate lists for proper handling.
2025-07-11 09:40:36 +02:00
2f8b6b4ade Temporary hack to also include changed directories
Needs to be cleaned up via proper subdir handling
2025-07-11 09:40:36 +02:00
c11def6005 Handle build results differently when requested with lastbuild=1
In that case we need to

 * ignore repo state as it is the current one. There is no last state
 * handle "unkown" state as finished as the package was never attempted,
   but we don't know the reason (eg. broken source or unresolvable)
2025-07-11 09:40:36 +02:00
0bee48472d Implementing cleanup of closed requests 2025-07-11 09:40:36 +02:00
29 changed files with 369 additions and 1229 deletions

View File

@@ -59,7 +59,6 @@ type AutogitConfig struct {
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
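
For illustration only, a minimal sketch of decoding a config that carries these fields. The ReviewGroup shape and the use of plain JSON are assumptions; the configs written by the import tool below contain comments and trailing commas, so the real parser is more lenient.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Hypothetical, trimmed-down shapes mirroring the struct above.
type ReviewGroup struct {
	Name      string
	Reviewers []string
}

type AutogitConfig struct {
	Reviewers          []string
	ReviewGroups       []ReviewGroup
	Committers         []string
	ManualMergeOnly    bool
	ManualMergeProject bool
}

func main() {
	raw := []byte(`{
		"Reviewers": ["alice"],
		"Committers": ["factory-maintainers"],
		"ManualMergeOnly": true
	}`)
	var cfg AutogitConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg)
}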

View File

@@ -769,8 +769,6 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
LogDebug("Getting submodules for:", commitId)
go func() {
defer done.Unlock()
defer close(data_out.ch)
@@ -804,6 +802,7 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
for _, te := range tree.items {
if te.isTree() {
trees[p+te.name+"/"] = te.hash
submoduleList[p+te.name] = te.hash
} else if te.isSubmodule() {
submoduleList[p+te.name] = te.hash
}
@@ -918,16 +917,6 @@ type GitStatusData struct {
Path string
Status int
States [3]string
/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}
func parseGitStatusHexString(data io.ByteReader) (string, error) {
@@ -950,20 +939,6 @@ func parseGitStatusHexString(data io.ByteReader) (string, error) {
}
}
func parseGitStatusString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}
func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
@@ -1004,7 +979,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1014,11 +989,11 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGitStatusStringWithSpace(data)
ret.States[0], err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1028,7 +1003,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1038,22 +1013,15 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 2); err != nil {
if err = skipGitStatusEntry(data, 7); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
@@ -1064,7 +1032,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
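
A rough standalone sketch of what these helpers parse, assuming `git status --porcelain=v2 -z` output; the entry layout follows the git documentation and the sample bytes are made up.

package main

import (
	"bytes"
	"fmt"
	"io"
)

// readField reads bytes up to a space or NUL, mirroring parseGitStatusString above.
// Paths containing spaces need a NUL-only terminator instead
// (cf. parseGitStatusStringWithSpace).
func readField(r io.ByteReader) (string, error) {
	var b []byte
	for {
		c, err := r.ReadByte()
		if err != nil {
			return "", err
		}
		if c == 0 || c == ' ' {
			return string(b), nil
		}
		b = append(b, c)
	}
}

func main() {
	// One "changed" entry: "1 <XY> <sub> <mH> <mI> <mW> <hH> <hI> <path>" with a NUL terminator.
	entry := []byte("1 .M N... 100644 100644 100644 abc123 def456 some/file.txt\x00")
	r := bytes.NewReader(entry)
	for i := 0; i < 8; i++ { // skip the eight metadata fields
		if _, err := readField(r); err != nil {
			panic(err)
		}
	}
	path, _ := readField(r)
	fmt.Println("path:", path)
}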

View File

@@ -555,8 +555,6 @@ func TestGitStatusParse(t *testing.T) {
Path: ".gitmodules",
Status: GitStatus_Unmerged,
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
SubmoduleChanges: "N...",
},
},
},

View File

@@ -718,18 +718,20 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
}
func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
limit := int64(20)
page := int64(1)
resCount := 1
resCount := limit
retData := []*models.TimelineComment{}
for resCount > 0 {
for resCount == limit {
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
issue.NewIssueGetCommentsAndTimelineParams().
WithOwner(org).
WithRepo(repo).
WithIndex(idx).
WithPage(&page),
WithPage(&page).
WithLimit(&limit),
gitea.transport.DefaultAuthentication,
)
@@ -737,13 +739,11 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
return nil, err
}
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
resCount = int64(len(res.Payload))
page++
retData = append(retData, res.Payload...)
}
LogDebug("total results:", len(retData))
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))
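
The loop change above stops paging at the first page shorter than the limit instead of waiting for an empty page. A standalone sketch of that pattern, with fetchPage standing in for the Gitea timeline call:

package main

import "fmt"

// fetchAll pages through results until the first short page, like GetTimeline above.
func fetchAll(fetchPage func(page, limit int64) ([]int64, error)) ([]int64, error) {
	const limit = int64(20)
	var all []int64
	for page, count := int64(1), limit; count == limit; page++ {
		items, err := fetchPage(page, limit)
		if err != nil {
			return nil, err
		}
		count = int64(len(items))
		all = append(all, items...)
	}
	return all, nil
}

func main() {
	// Fake backend with 45 items -> pages of 20, 20, 5.
	data := make([]int64, 45)
	for i := range data {
		data[i] = int64(i)
	}
	got, _ := fetchAll(func(page, limit int64) ([]int64, error) {
		start := (page - 1) * limit
		if start >= int64(len(data)) {
			return nil, nil
		}
		end := start + limit
		if end > int64(len(data)) {
			end = int64(len(data))
		}
		return data[start:end], nil
	})
	fmt.Println("fetched", len(got), "items")
}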

View File

@@ -22,32 +22,55 @@ import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
type RabbitConnection struct {
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type ListenDefinitions struct {
RabbitURL *url.URL // amqps://user:password@host/queue
queueName string
ch *rabbitmq.Channel
GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitProcessor interface {
GenerateTopics() []string
Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}
type RabbitMessage rabbitmq.Delivery
func (l *RabbitConnection) ProcessTopicChanges() {
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
for {
topic, ok := <-l.topicSubChanges
if !ok {
@@ -57,11 +80,11 @@ func (l *RabbitConnection) ProcessTopicChanges() {
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
@@ -70,7 +93,7 @@ func (l *RabbitConnection) ProcessTopicChanges() {
}
}
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
@@ -129,7 +152,7 @@ func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.ProcessTopicChanges()
go l.processTopicChanges(ch, q.Name)
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
@@ -151,18 +174,18 @@ func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
}
}
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.ConnectAndProcessRabbitMQ(ch)
go l.connectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.ProcessRabbitMQ(ch)
err := l.processRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
@@ -171,20 +194,49 @@ func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
}
}
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()
func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
ch := make(chan RabbitMessage, 100)
go l.ConnectAndProcessRabbitMQ(ch)
go l.connectAndProcessRabbitMQ(ch)
return ch
}
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range l.Orgs {
for requestType, _ := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
j := 0
next_new_topic:
@@ -221,8 +273,14 @@ next_new_topic:
l.topics = newTopics
}
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
for {
msg, ok := <-ch
@@ -231,8 +289,36 @@ func ProcessRabbitMQEvents(processor RabbitProcessor) error {
}
LogDebug("event:", msg.RoutingKey)
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}
LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
}
}
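
The routing keys bound by this code follow "<scope>.src.<org>.<type>.#". A standalone sketch of the topic generation, with invented org and handler names:

package main

import (
	"fmt"
	"slices"
)

// Mirrors generateTopics above: one binding per org/handler pair, deduplicated.
func main() {
	scope := "opensuse" // "suse" unless the bus is rabbit.opensuse.org
	orgs := []string{"pool", "autogits"}
	handlerTypes := []string{"push", "pull_request", "issue_comment"}

	topics := make([]string, 0, len(orgs)*len(handlerTypes))
	for _, org := range orgs {
		for _, t := range handlerTypes {
			topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, t))
		}
	}
	slices.Sort(topics)
	for _, t := range slices.Compact(topics) {
		fmt.Println(t)
	}
}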

View File

@@ -50,13 +50,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
}
slices.Sort(test.topicDelta)
@@ -66,11 +64,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}
changes := []string{}
l.c.UpdateTopics(l)
l.UpdateTopics()
a:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -80,13 +78,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
l.Orgs = test.orgs2
l.c.UpdateTopics(l)
l.UpdateTopics()
changes = []string{}
b:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)

View File

@@ -1,56 +0,0 @@
package common
import (
"os"
"path"
"strings"
"gopkg.in/yaml.v3"
)
type Manifest struct {
Subdirectories []string
}
func (m *Manifest) SubdirForPackage(pkg string) string {
if m == nil {
return pkg
}
idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)
for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
}
}
if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
}
return pkg
}
func ReadManifestFile(filename string) (*Manifest, error) {
data, err := os.ReadFile(filename)
if err != nil {
return nil, err
}
return ParseManifestFile(data)
}
func ParseManifestFile(data []byte) (*Manifest, error) {
ret := &Manifest{}
err := yaml.Unmarshal(data, ret)
if err != nil {
return nil, err
}
return ret, nil
}

View File

@@ -1,56 +0,0 @@
package common_test
import (
"testing"
"src.opensuse.org/autogits/common"
)
func TestManifestSubdirAssignments(t *testing.T) {
tests := []struct {
Name string
ManifestContent string
Packages []string
ManifestLocations []string
}{
{
Name: "empty manifest",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
ManifestLocations: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
},
{
Name: "only few subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "b/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
},
}
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
m, err := common.ParseManifestFile([]byte(test.ManifestContent))
if err != nil {
t.Fatal(err)
}
for i, pkg := range test.Packages {
expected := test.ManifestLocations[i]
if l := m.SubdirForPackage(pkg); l != expected {
t.Error("Expected:", expected, "but got:", l)
}
}
})
}
}

View File

@@ -551,7 +551,6 @@ func (c *ObsClient) DeleteProject(project string) error {
query.Add("force", "1")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("DELETE", url.String(), nil)
if err != nil {
return err
}
@@ -562,44 +561,25 @@ func (c *ObsClient) DeleteProject(project string) error {
}
return nil
}
func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)
if err != nil {
return nil, err
}
return res.Body, nil
}
type PackageBuildStatus struct {
Package string `xml:"package,attr"`
Code string `xml:"code,attr"`
Details string `xml:"details"`
LastUpdate int64
}
type BuildResult struct {
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
LastUpdate int64
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
}
type Binary struct {
@@ -619,7 +599,6 @@ type BuildResultList struct {
Result []BuildResult `xml:"result"`
isLastBuild bool
LastUpdate int64
}
func (r *BuildResultList) GetPackageList() []string {
@@ -642,48 +621,6 @@ func (r *BuildResultList) GetPackageList() []string {
return pkgList
}
func packageSort(A, B PackageBuildStatus) int {
return strings.Compare(A.Package, B.Package)
}
func repoSort(A, B BuildResult) int {
eq := strings.Compare(A.Project, B.Project)
if eq == 0 {
eq = strings.Compare(A.Repository, B.Repository)
if eq == 0 {
eq = strings.Compare(A.Arch, B.Arch)
}
}
return eq
}
func (r *BuildResultList) MergePackageState(now int64, pkgState *BuildResultList) {
for _, nr := range pkgState.Result {
idx, found := slices.BinarySearchFunc(r.Result, nr, repoSort)
// not found, new repo?
if !found {
nr.LastUpdate = now
r.Result = slices.Insert(r.Result, idx, nr)
continue
}
// update current repo
repo := &r.Result[idx]
// update all the packages in the repo
for _, p := range nr.Status {
p.LastUpdate = now
idx, found := slices.BinarySearchFunc(repo.Status, p, packageSort)
if !found {
repo.Status = slices.Insert(repo.Status, idx, p)
continue
}
repo.Status[idx] = p
}
}
}
func (r *BuildResultList) BuildResultSummary() (success, finished bool) {
if r == nil {
return false, false
@@ -951,11 +888,5 @@ func (c *ObsClient) BuildStatusWithState(project string, opts *BuildResultOption
if ret != nil {
ret.isLastBuild = opts.LastBuild
}
slices.SortFunc(ret.Result, repoSort)
for _, r := range ret.Result {
slices.SortFunc(r.Status, packageSort)
}
return ret, err
}
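
MergePackageState above depends on Result and Status staying sorted so that slices.BinarySearchFunc yields an insert-or-update point. A standalone sketch of that pattern, with simplified stand-in types:

package main

import (
	"fmt"
	"slices"
	"strings"
)

type pkgStatus struct {
	Package, Code string
}

func pkgSort(a, b pkgStatus) int { return strings.Compare(a.Package, b.Package) }

// upsert inserts s into the sorted slice, or replaces the existing entry,
// mirroring the MergePackageState logic above.
func upsert(sorted []pkgStatus, s pkgStatus) []pkgStatus {
	idx, found := slices.BinarySearchFunc(sorted, s, pkgSort)
	if !found {
		return slices.Insert(sorted, idx, s)
	}
	sorted[idx] = s
	return sorted
}

func main() {
	status := []pkgStatus{{"bash", "succeeded"}, {"zsh", "failed"}}
	status = upsert(status, pkgStatus{"fish", "building"}) // new entry
	status = upsert(status, pkgStatus{"zsh", "succeeded"}) // update
	fmt.Println(status)
}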

View File

@@ -25,13 +25,6 @@ type PRSet struct {
BotUser string
}
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
org = prinfo.PR.Base.Repo.Owner.UserName
repo = prinfo.PR.Base.Repo.Name
idx = prinfo.PR.Index
return
}
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
for _, p := range currentSet {
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
@@ -128,28 +121,27 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
}, nil
}
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
func (rs *PRSet) Contains(pr *models.PullRequest) bool {
for _, p := range rs.PRs {
if p.PR.Base.RepoID == pr.Base.RepoID &&
p.PR.Head.Sha == pr.Head.Sha &&
p.PR.Base.Name == pr.Base.Name {
return p, true
return true
}
}
return nil, false
return false
}
func (rs *PRSet) AddPR(pr *models.PullRequest) *PRInfo {
if pr, found := rs.Find(pr); found {
return pr
func (rs *PRSet) AddPR(pr *models.PullRequest) error {
if rs.Contains(pr) {
return nil
}
prinfo := &PRInfo{
rs.PRs = append(rs.PRs, &PRInfo{
PR: pr,
}
rs.PRs = append(rs.PRs, prinfo)
return prinfo
})
return nil
}
func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
@@ -180,15 +172,6 @@ func (rs *PRSet) GetPrjGitPR() (*PRInfo, error) {
return nil, PRSet_PrjGitMissing
}
func (rs *PRSet) NeedRecreatingPrjGit(currentBranchHash string) bool {
pr, err := rs.GetPrjGitPR()
if err != nil {
return true
}
return pr.PR.Base.Sha == currentBranchHash
}
func (rs *PRSet) IsConsistent() bool {
prjpr_info, err := rs.GetPrjGitPR()
if err != nil {
@@ -319,7 +302,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pr.Reviews = r
if !pr.Reviews.IsManualMergeOK() {
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
LogInfo("Not approved manual merge")
return false
}
}

View File

@@ -15,23 +15,7 @@ import (
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
/*
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}
t.Error(r)
}
*/
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
timeline := make([]*models.TimelineComment, len(reviews))
for idx, review := range reviews {

View File

@@ -1,130 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"fmt"
"runtime/debug"
"slices"
"strings"
)
const (
RequestType_CreateBrachTag = "create"
RequestType_DeleteBranchTag = "delete"
RequestType_Fork = "fork"
RequestType_Issue = "issues"
RequestType_IssueAssign = "issue_assign"
RequestType_IssueComment = "issue_comment"
RequestType_IssueLabel = "issue_label"
RequestType_IssueMilestone = "issue_milestone"
RequestType_Push = "push"
RequestType_Repository = "repository"
RequestType_Release = "release"
RequestType_PR = "pull_request"
RequestType_PRAssign = "pull_request_assign"
RequestType_PRLabel = "pull_request_label"
RequestType_PRComment = "pull_request_comment"
RequestType_PRMilestone = "pull_request_milestone"
RequestType_PRSync = "pull_request_sync"
RequestType_PRReviewAccepted = "pull_request_review_approved"
RequestType_PRReviewRejected = "pull_request_review_rejected"
RequestType_PRReviewRequest = "pull_request_review_request"
RequestType_PRReviewComment = "pull_request_review_comment"
RequestType_Wiki = "wiki"
)
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string
c *RabbitConnection
}
func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}
func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range gitea.Orgs {
for requestType, _ := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}
LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
return nil
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
return fmt.Errorf("Invalid routing key: %s", route)
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}

View File

@@ -1,115 +0,0 @@
package common
import (
"encoding/json"
"errors"
"fmt"
"slices"
"strings"
)
const (
ObsMessageType_PackageBuildFail = "package.build_fail"
ObsMessageType_PackageBuildSuccess = "package.build_success"
ObsMessageType_PackageBuildUnchanged = "package.build_unchanged"
ObsMessageType_RepoBuildFinished = "repo.build_finished"
ObsMessageType_RepoBuildStarted = "repo.build_started"
)
type BuildResultMsg struct {
Status string
Project string `json:"project"`
Package string `json:"package"`
Repo string `json:"repository"`
Arch string `json:"arch"`
StartTime int32 `json:"starttime"`
EndTime int32 `json:"endtime"`
WorkerID string `json:"workerid"`
Version string `json:"versrel"`
Build string `json:"buildtype"`
}
type RepoBuildMsg struct {
Status string
Project string `json:"project"`
Repo string `json:"repo"`
Arch string `json:"arch"`
BuildId string `json:"buildid"`
}
var ObsRabbitMessageError_UnknownMessageType error = errors.New("Unknown message type")
var ObsRabbitMessageError_ParseError error = errors.New("JSON parsing error")
func ParseObsRabbitMessaege(ObsMessageType string, data []byte) (interface{}, error) {
unmarshall := func(data []byte, v any) (interface{}, error) {
if err := json.Unmarshal(data, v); err != nil {
return nil, fmt.Errorf("%w: %s", ObsRabbitMessageError_ParseError, err)
}
return v, nil
}
switch ObsMessageType {
case ObsMessageType_PackageBuildSuccess, ObsMessageType_PackageBuildUnchanged:
ret := &BuildResultMsg{Status: "succeeded"}
return unmarshall(data, ret)
case ObsMessageType_PackageBuildFail:
ret := &BuildResultMsg{Status: "failed"}
return unmarshall(data, ret)
case ObsMessageType_RepoBuildFinished:
ret := &RepoBuildMsg{Status: "finished"}
return unmarshall(data, ret)
case ObsMessageType_RepoBuildStarted:
ret := &RepoBuildMsg{Status: "building"}
return unmarshall(data, ret)
}
return nil, fmt.Errorf("%w: %s", ObsRabbitMessageError_UnknownMessageType, ObsMessageType)
}
type ObsMessageProcessor func(topic string, data []byte) error
type RabbitMQObsBuildStatusProcessor struct {
Handlers map[string]ObsMessageProcessor
c *RabbitConnection
}
func (o *RabbitMQObsBuildStatusProcessor) routingKeyPrefix() string {
if strings.HasSuffix(o.c.RabbitURL.Hostname(), "opensuse.org") {
return "opensuse"
}
return "suse"
}
func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
prefix := o.routingKeyPrefix()
msgs := make([]string, len(o.Handlers))
idx := 0
for k, _ := range o.Handlers {
msgs[idx] = prefix + ".obs." + k
idx++
}
slices.Sort(msgs)
return msgs
}
func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}
return o.c
}
func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
prefix := o.routingKeyPrefix() + ".obs."
topic := strings.TrimPrefix(msg.RoutingKey, prefix)
if h, ok := o.Handlers[topic]; ok {
return h(topic, msg.Body)
}
return fmt.Errorf("Unhandled message received: %s", msg.RoutingKey)
}
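
For reference, a small sketch of decoding one of these payloads. The struct is a trimmed copy of BuildResultMsg above and the sample message is invented; ParseObsRabbitMessaege wraps this unmarshalling with a switch on the message type.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of BuildResultMsg above.
type BuildResultMsg struct {
	Status  string
	Project string `json:"project"`
	Package string `json:"package"`
	Repo    string `json:"repository"`
	Arch    string `json:"arch"`
}

func main() {
	payload := []byte(`{"project":"openSUSE:Factory","package":"bash","repository":"standard","arch":"x86_64"}`)
	msg := &BuildResultMsg{Status: "failed"} // a "package.build_fail" message implies this status
	if err := json.Unmarshal(payload, msg); err != nil {
		panic(err)
	}
	fmt.Printf("%s/%s %s/%s: %s\n", msg.Project, msg.Package, msg.Repo, msg.Arch, msg.Status)
}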

View File

@@ -25,39 +25,26 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, r
return nil, err
}
reviews := make([]*models.PullReview, 0, len(reviewers))
reviews := make([]*models.PullReview, 0, 10)
var comments []*models.TimelineComment
alreadyHaveUserReview := func(user string) bool {
for _, r := range reviews {
if r.User != nil && r.User.UserName == user {
return true
}
}
return false
}
for idx, item := range timeline {
if item.Type == TimelineCommentType_Review {
for _, r := range rawReviews {
if r.ID == item.ReviewID {
if !alreadyHaveUserReview(r.User.UserName) {
reviews = append(reviews, r)
}
reviews = append(reviews, r)
break
}
}
} else if item.Type == TimelineCommentType_Comment {
comments = append(comments, item)
} else if item.Type == TimelineCommentType_PushPull {
LogDebug("cut-off", item.Created)
timeline = timeline[0:idx]
break
} else {
LogDebug("Unhandled timeline type:", item.Type)
}
}
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))
return &PRReviews{
reviews: reviews,
@@ -85,7 +72,6 @@ func (r *PRReviews) IsManualMergeOK() bool {
if c.Updated != c.Created {
continue
}
LogDebug("comment:", c.User.UserName, c.Body)
if slices.Contains(r.reviewers, c.User.UserName) {
if bodyCommandManualMergeOK(c.Body) {
return true

View File

@@ -113,10 +113,6 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}
func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {

View File

@@ -173,3 +173,4 @@ func (d DevelProjects) GetDevelProject(pkg string) (string, error) {
return "", DevelProjectNotFound
}

View File

@@ -73,10 +73,6 @@ func runObsCommand(args ...string) ([]string, error) {
var DebugMode bool
func giteaPackage(pkg string) string {
return strings.ReplaceAll(pkg, "+", "_")
}
func projectMaintainer(obs *common.ObsClient, prj string) ([]string, []string) { // users, groups
meta, err := obs.GetProjectMeta(prj)
if err != nil {
@@ -190,16 +186,13 @@ func cloneDevel(git common.Git, gitDir, outName, urlString string) error {
}
func importRepos(packages []string) {
RepoToObsName := make(map[string]string)
factoryRepos := make([]*models.Repository, 0, len(packages)*2)
develProjectPackages := make([]string, 0, len(packages))
for _, pkg := range packages {
src_pkg_name := strings.Split(pkg, ":")
RepoToObsName[giteaPackage(src_pkg_name[0])] = src_pkg_name[0]
repo, err := client.Repository.RepoGet(
repository.NewRepoGetParams().
WithDefaults().WithOwner("pool").WithRepo(giteaPackage(src_pkg_name[0])),
WithDefaults().WithOwner("pool").WithRepo(src_pkg_name[0]),
r.DefaultAuthentication)
if err != nil {
@@ -226,7 +219,7 @@ func importRepos(packages []string) {
oldPackageNames := make([]string, 0, len(factoryRepos))
for _, repo := range factoryRepos {
oldPackageNames = append(oldPackageNames, RepoToObsName[repo.Name])
oldPackageNames = append(oldPackageNames, repo.Name)
}
// fork packags from pool
@@ -248,60 +241,44 @@ func importRepos(packages []string) {
log.Println("adding remotes...")
for i := 0; i < len(factoryRepos); i++ {
pkg := factoryRepos[i]
pkgName := RepoToObsName[pkg.Name]
gitName := pkg.Name
// verify that package was created by `git-importer`, or it's scmsync package and clone it
fi, err := os.Stat(filepath.Join(git.GetPath(), gitName))
fi, err := os.Stat(filepath.Join(git.GetPath(), pkg.Name))
if os.IsNotExist(err) {
if slices.Contains(develProjectPackages, pkgName) {
if slices.Contains(develProjectPackages, pkg.Name) {
// failed import of former factory package
log.Println("Failed to import former factory pkg:", pkgName)
continue
}
// scmsync?
devel_project, err := devel_projects.GetDevelProject(pkgName)
devel_project, err := devel_projects.GetDevelProject(pkg.Name)
if err != nil {
log.Panicln("devel project not found for", RepoToObsName[pkg.Name], "err:", err)
log.Panicln("devel project not found for", pkg.Name, "err:", err)
}
meta, _ := obs.GetPackageMeta(devel_project, pkgName)
meta, _ := obs.GetPackageMeta(devel_project, pkg.Name)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", gitName, meta.ScmSync); err != nil {
if err2 := cloneDevel(git, "", pkg.Name, meta.ScmSync); err != nil {
log.Panicln(err2)
}
if err2 := git.GitExec(gitName, "checkout", "-B", "main"); err2 != nil {
git.GitExecOrPanic(gitName, "checkout", "-B", "master")
}
git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main")
continue
}
// try again, should now exist
if fi, err = os.Stat(filepath.Join(git.GetPath(), gitName)); err != nil {
if fi, err = os.Stat(filepath.Join(git.GetPath(), pkg.Name)); err != nil {
log.Panicln(err)
}
} else if err != nil {
log.Panicln(err)
} else {
// verify that we do not have scmsync for imported packages
meta, err := obs.GetPackageMeta(prj, pkgName)
meta, err := obs.GetPackageMeta(prj, pkg.Name)
if err != nil {
log.Panicln(err)
}
if len(meta.ScmSync) > 0 {
u, err := url.Parse(meta.ScmSync)
if err != nil {
log.Println("Invlid scmsync in", pkg, meta.ScmSync, err)
}
o, err := url.Parse(strings.TrimSpace(git.GitExecWithOutputOrPanic(gitName, "remote", "get-url", "origin")))
log.Println("Invlid scmsync in git repo", pkg, meta.ScmSync, err)
if u.Host != o.Host || u.Path != u.Path {
log.Panicln("importing an scmsync package??:", prj, gitName)
} else {
log.Println("previous SCMSYNC package. Pull.")
git.GitExecOrPanic(gitName, "pull", "origin", "HEAD:main")
}
log.Panicln("importing an scmsync package??:", prj, pkg.Name)
}
}
@@ -310,11 +287,11 @@ func importRepos(packages []string) {
}
// add remote repos
out := git.GitExecWithOutputOrPanic(gitName, "remote", "show", "-n")
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show", "-n")
switch pkg.Owner.UserName {
case "pool":
if !slices.Contains(strings.Split(out, "\n"), "pool") {
out := git.GitExecWithOutputOrPanic(gitName, "remote", "add", "pool", pkg.CloneURL)
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "add", "pool", pkg.CloneURL)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
@@ -421,22 +398,12 @@ func importRepos(packages []string) {
for i := 0; i < len(develProjectPackages); i++ {
pkg := develProjectPackages[i]
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Println("Error fetching pkg meta for:", prj, pkg, err)
}
}
if meta == nil {
log.Println(" **** pkg meta is nil? ****")
} else if len(meta.ScmSync) > 0 {
if _, err := os.Stat(path.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
meta, _ := obs.GetPackageMeta(prj, pkg)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
continue
} else {
common.PanicOnError(gitImporter(prj, pkg))
@@ -498,7 +465,7 @@ func importRepos(packages []string) {
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg.Name, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkgName, "fetch", "devel")
// git.GitExecOrPanic(pkg.Name, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg.Name, "lfs", "fetch", "--all")
@@ -506,8 +473,8 @@ func importRepos(packages []string) {
}
git.GitExecOrPanic(pkg.Name, "push", "develorigin", "main", "-f")
git.GitExec(pkg.Name, "push", "develorigin", "--delete", "factory", "devel")
// git.GitExecOrPanic(pkg.ame, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(repo.Name)).WithBody(&models.EditRepoOption{
// git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
HasPullRequests: true,
@@ -532,13 +499,12 @@ func importRepos(packages []string) {
for _, pkg := range develProjectPackages {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
_, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &giteaPkg,
Name: &pkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
@@ -548,7 +514,7 @@ func importRepos(packages []string) {
log.Panicln("Error creating new package repository:", pkg, err)
}
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPkg).WithBody(
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
@@ -588,7 +554,7 @@ func importRepos(packages []string) {
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
git.GitExec(pkg, "push", "develorigin", "--delete", "factory", "devel")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.EditRepoOption{
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
}), r.DefaultAuthentication)
@@ -687,7 +653,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
missing := []string{}
uids := make([]common.PersonRepoMeta, len(orig_uids))
copy(uids, orig_uids)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
if err != nil {
if errors.Is(err, &repository.RepoListCollaboratorsNotFound{}) {
return missing
@@ -708,7 +674,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
log.Println("missing collabs for", pkg, ":", uids)
}
for _, u := range uids {
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.AddCollaboratorOption{
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(pkg).WithBody(&models.AddCollaboratorOption{
Permission: "write",
}).WithCollaborator(u.UserID), r.DefaultAuthentication)
@@ -843,14 +809,14 @@ func createPrjGit() {
if err != nil {
log.Panicln(err)
}
file.WriteString("{\n // Reference build project\n \"ObsProject\": \"" + prj + "\",\n}\n")
file.WriteString("{\n // Reference build project\n \"ObsProject\": \""+prj+"\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "staging.config")
if file, err = os.Create(path.Join(git.GetPath(), common.DefaultGitPrj, "workflow.config")); err != nil {
log.Panicln(err)
}
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \"" + org + "\",\n}\n")
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \""+org+"\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "workflow.config")
}
@@ -891,7 +857,6 @@ func main() {
syncMaintainers := flags.Bool("sync-maintainers-only", false, "Sync maintainers to Gitea and exit")
flags.BoolVar(&forceBadPool, "bad-pool", false, "Force packages if pool has no branches due to bad import")
flags.BoolVar(&forceNonPoolPackages, "non-pool", false, "Allow packages that are not in pool to be created. WARNING: Can't add to factory later!")
specificPackage := flags.String("package", "", "Process specific package only, ignoring the others")
if help := flags.Parse(os.Args[1:]); help == flag.ErrHelp || flags.NArg() != 2 {
printHelp(helpString.String())
@@ -988,15 +953,11 @@ func main() {
if *purgeOnly {
log.Println("Purging repositories...")
for _, pkg := range packages {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
}
os.Exit(10)
}
if len(*specificPackage) != 0 {
importRepos([]string{*specificPackage})
return
}
importRepos(packages)
syncMaintainersToGitea(packages)
}

View File

@@ -1,46 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"io"
"os"
"github.com/tailscale/hujson"
)
type Config struct {
ForgeEndpoint string `json:"forge_url"`
Keys []string `json:"keys"`
}
type contextKey string
const configKey contextKey = "config"
func ReadConfig(reader io.Reader) (*Config, error) {
data, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("error reading config data: %w", err)
}
config := Config{}
data, err = hujson.Standardize(data)
if err != nil {
return nil, fmt.Errorf("failed to parse json: %w", err)
}
if err := json.Unmarshal(data, &config); err != nil {
return nil, fmt.Errorf("error parsing json to api keys and target url: %w", err)
}
return &config, nil
}
func ReadConfigFile(filename string) (*Config, error) {
file, err := os.Open(filename)
if err != nil {
return nil, fmt.Errorf("cannot open config file for reading. err: %w", err)
}
defer file.Close()
return ReadConfig(file)
}
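
An example of the config this (removed) ReadConfig accepted, wrapped in a runnable sketch. The URL and keys are placeholders; hujson.Standardize is what allows the comment and the trailing comma.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/tailscale/hujson"
)

// Same shape as the Config above; values are placeholders.
type Config struct {
	ForgeEndpoint string   `json:"forge_url"`
	Keys          []string `json:"keys"`
}

const exampleConfig = `{
	// where statuses get forwarded to
	"forge_url": "https://src.example.org/api/v1",
	"keys": ["proxy-api-key-1", "proxy-api-key-2"],
}`

func main() {
	std, err := hujson.Standardize([]byte(exampleConfig))
	if err != nil {
		log.Fatal(err)
	}
	var cfg Config
	if err := json.Unmarshal(std, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.ForgeEndpoint, len(cfg.Keys))
}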

View File

@@ -1,15 +0,0 @@
package main
import (
"context"
"net/http"
)
func ConfigMiddleWare(cfg *Config) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), configKey, cfg)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
}

View File

@@ -1,169 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io"
"net/http"
"os"
"slices"
"strings"
"src.opensuse.org/autogits/common"
)
type Status struct {
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
type StatusInput struct {
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
func main() {
configFile := flag.String("config", "", "status proxy config file")
flag.Parse()
if *configFile == "" {
common.LogError("missing required argument config")
return
}
config, err := ReadConfigFile(*configFile)
if err != nil {
common.LogError("Failed to read config file", err)
return
}
mux := http.NewServeMux()
mux.Handle("/repos/{owner}/{repo}/statuses/{sha}", ConfigMiddleWare(config)(http.HandlerFunc(StatusProxy)))
common.LogInfo("server up and listening on :3000")
err = http.ListenAndServe(":3000", mux)
if err != nil {
common.LogError("Server failed to start up", err)
}
}
func StatusProxy(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodPost {
config, ok := r.Context().Value(configKey).(*Config)
if !ok {
common.LogError("Config missing from context")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
header := r.Header.Get("Authorization")
if header == "" {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token_arr := strings.Split(header, " ")
if len(token_arr) != 2 {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
if !strings.EqualFold(token_arr[0], "Bearer") {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token := token_arr[1]
if !slices.Contains(config.Keys, token) {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
owner := r.PathValue("owner")
repo := r.PathValue("repo")
sha := r.PathValue("sha")
if !ok {
common.LogError("Failed to get config from context, is it set?")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
}
posturl := fmt.Sprintf("%s/repos/%s/%s/statuses/%s", config.ForgeEndpoint, owner, repo, sha)
decoder := json.NewDecoder(r.Body)
var statusinput StatusInput
err := decoder.Decode(&statusinput)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
status := Status{
Context: "Build in obs",
State: statusinput.State,
TargetUrl: statusinput.TargetUrl,
}
status_payload, err := json.Marshal(status)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
client := &http.Client{}
req, err := http.NewRequest("POST", posturl, bytes.NewBuffer(status_payload))
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
ForgeToken := os.Getenv("GITEA_TOKEN")
if ForgeToken == "" {
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
common.LogError("GITEA_TOKEN was not set, all requests will fail")
return
}
req.Header.Add("Content-Type", "Content-Type")
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
resp, err := client.Do(req)
if err != nil {
common.LogError(fmt.Sprintf("Request to forge endpoint failed: %v", err))
http.Error(w, http.StatusText(http.StatusBadGateway), http.StatusBadGateway)
return
}
defer resp.Body.Close()
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(resp.StatusCode)
/*
the commented out section sets every key
value from the headers, unsure if this
leaks information from gitea
for k, v := range resp.Header {
for _, vv := range v {
w.Header().Add(k, vv)
}
}
*/
_, err = io.Copy(w, resp.Body)
if err != nil {
common.LogError("Error copying response body: %v", err)
}
} else {
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
return
}
}
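
A sketch of a client call against this (removed) proxy; the host, owner/repo/sha, key, and target URL are placeholders.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Body matches the StatusInput the proxy decodes; the proxy injects the Context itself.
	body := []byte(`{"state":"success","target_url":"https://build.example.org/project/show/home:foo"}`)
	url := "http://localhost:3000/repos/pool/bash/statuses/0123abc" // placeholder owner/repo/sha
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer proxy-api-key-1") // must be one of config.Keys
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("proxy answered:", resp.Status)
}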

View File

@@ -11,7 +11,6 @@ import (
"strconv"
"strings"
"time"
"unicode"
"src.opensuse.org/autogits/common"
"src.opensuse.org/autogits/common/gitea-generated/models"
@@ -22,10 +21,9 @@ var acceptRx *regexp.Regexp
var rejectRx *regexp.Regexp
var groupName string
func InitRegex(newGroupName string) {
groupName = newGroupName
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
rejectRx = regexp.MustCompile("^:\\s*")
func InitRegex(groupName string) {
acceptRx = regexp.MustCompile("\\s*:\\s*LGTM")
rejectRx = regexp.MustCompile("\\s*:\\s*")
}
func ParseReviewLine(reviewText string) (bool, string) {
@@ -36,18 +34,7 @@ func ParseReviewLine(reviewText string) (bool, string) {
return false, line
}
l := line[glen:]
for idx, r := range l {
if unicode.IsSpace(r) {
continue
} else if r == ':' {
return true, l[idx:]
} else {
return false, line
}
}
return false, line
return true, line[glen:]
}
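
A standalone demo of the matching, using the "^:\s*(LGTM|approved?)" variant shown above. The group name and sample comments are invented, and the real ParseReviewLine strips the group prefix before matching, as done here.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	group := "@factory-reviewers"
	acceptRx := regexp.MustCompile(`^:\s*(LGTM|approved?)`)

	for _, line := range []string{
		"@factory-reviewers: LGTM",
		"@factory-reviewers: approved",
		"@factory-reviewers: needs work",
		"unrelated comment",
	} {
		rest, ok := strings.CutPrefix(line, group)
		verdict := "ignored" // comment not addressed to the group
		if ok {
			if acceptRx.MatchString(rest) {
				verdict = "accepted"
			} else {
				verdict = "rejected" // addressed to the group but without approval keyword
			}
		}
		fmt.Printf("%-35q -> %s\n", line, verdict)
	}
}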
func ReviewAccepted(reviewText string) bool {

View File

@@ -2,76 +2,6 @@ package main
import "testing"
func TestReviewApprovalCheck(t *testing.T) {
tests := []struct {
Name string
GroupName string
InString string
Approved bool
Rejected bool
}{
{
Name: "Empty String",
GroupName: "group",
InString: "",
},
{
Name: "Random Text",
GroupName: "group",
InString: "some things LGTM",
},
{
Name: "Group name with Random Text means disapproval",
GroupName: "group",
InString: "@group: some things LGTM",
Rejected: true,
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "LGTM approval",
GroupName: "group2",
InString: "@group2: LGTM",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approved",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approve",
Approved: true,
},
{
Name: "disapproval",
GroupName: "group2",
InString: "@group2: disapprove",
Rejected: true,
},
}
func TestReviews(t *testing.T) {
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
InitRegex(test.GroupName)
if r := ReviewAccepted(test.InString); r != test.Approved {
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
}
if r := ReviewRejected(test.InString); r != test.Rejected {
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
}
})
}
}

View File

@@ -263,7 +263,7 @@ func ProcessRepoBuildStatus(results, ref []common.PackageBuildStatus) (status Bu
return BuildStatusSummarySuccess, SomeSuccess
}
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string) (*common.ProjectMeta, error) {
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
common.LogDebug("repo content fetching ...")
err := FetchPrGit(git, pr)
if err != nil {
@@ -289,7 +289,15 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
}
meta, err := ObsClient.GetProjectMeta(buildPrj)
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
if err == nil {
// success, so we use that staging master project as our build project
buildPrj = stagingMasterPrj
} else {
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
meta, err = ObsClient.GetProjectMeta(buildPrj)
}
if err != nil {
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
return nil, err
@@ -414,7 +422,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
var state RequestModification = RequestModificationSourceChanged
if meta == nil {
// new build
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject)
common.LogDebug(" Staging master:", config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
if err != nil {
return RequestModificationNoChange, err
}
@@ -428,6 +437,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
} else {
err = ObsClient.SetProjectMeta(meta)
if err != nil {
x, _ := xml.MarshalIndent(meta, "", " ")
common.LogDebug(" meta:", string(x))
common.LogError("cannot create meta project:", err)
return RequestModificationNoChange, err
}
@@ -643,7 +654,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogError("No PR associated with review:", org, "/", repo, "#", id, "Error:", err)
return true, err
}
common.LogDebug("PR state:", pr.State)
if pr.State == "closed" {
// dismiss the review
@@ -660,40 +670,68 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
}
if review, err := FetchOurLatestActionableReview(gitea, org, repo, id); err == nil {
common.LogInfo("processing review", review.HTMLURL, "state", review.State)
// Fetching data
review, review_error := FetchOurLatestActionableReview(gitea, org, repo, id)
if pr.State != "closed" && review_error != nil {
// Nothing to do
return true, nil
}
err = FetchPrGit(git, pr)
if err != nil {
common.LogError("Cannot fetch PR git:", pr.URL)
return false, err
err = FetchPrGit(git, pr)
if err != nil {
common.LogError("Cannot fetch PR git:", pr.URL)
return false, err
}
// we want the possibly pending modification here, in case stagings are added, etc.
// jobs of review team to deal with issues
common.LogDebug("QA configuration fetching ...", common.StagingConfigFile)
data, err := git.GitCatFile(pr.Head.Sha, pr.Head.Sha, common.StagingConfigFile)
if err != nil {
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
if !IsDryRun {
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
}
return true, err
}
// we want the possibly pending modification here, in case stagings are added, etc.
// jobs of review team to deal with issues
common.LogDebug("QA configuration fetching ...", common.StagingConfigFile)
data, err := git.GitCatFile(pr.Head.Sha, pr.Head.Sha, common.StagingConfigFile)
if err != nil {
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
if !IsDryRun {
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
}
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
}
if stagingConfig.ObsProject == "" {
common.LogError("Cannot find reference project for PR#", pr.Index)
if !IsDryRun && review_error == nil {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
return true, err
}
return true, nil
}
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
}
common.LogDebug("ObsProject:", stagingConfig.ObsProject)
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, ObsClient.HomeProject, pr)
if stagingConfig.ObsProject == "" {
common.LogError("Cannot find reference project for PR#", pr.Index)
// Cleanup projects
if pr.State == "closed" {
// review is done, cleanup
common.LogInfo(" -- closed request, cleanup staging projects")
for _, setup := range stagingConfig.QA {
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
return true, err
ObsClient.DeleteProject(stagingProject + ":" + setup.Name)
}
return true, nil
}
if stagingProject != "" {
if !IsDryRun {
ObsClient.DeleteProject(stagingProject)
}
}
return true, nil
}
// Process review aka setup projects
if review_error == nil {
common.LogInfo("processing review", review.HTMLURL, "state", review.State)
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
if err != nil {
@@ -727,16 +765,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
}
if stagingConfig.StagingProject != "" {
// staging project must either be nothing or be *under* the target project.
// other setups are currently not supported
// NOTE: this is user input, so we need some limits here
l := len(stagingConfig.ObsProject)
if l >= len(stagingConfig.StagingProject) || stagingConfig.ObsProject != stagingConfig.StagingProject[0:l] {
common.LogError("StagingProject (", stagingConfig.StagingProject, ") is not child of target project", stagingConfig.ObsProject)
}
}
if meta.Name != stagingConfig.ObsProject {
common.LogError("staging.config . ObsProject:", stagingConfig.ObsProject, " is not target project name", meta.Name)
if !IsDryRun {
@@ -757,17 +785,22 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogDebug(" # head submodules:", len(headSubmodules))
common.LogDebug(" # base submodules:", len(baseSubmodules))
modifiedOrNew := make([]string, 0, 16)
modifiedPackages := make([]string, 0, 16)
newPackages := make([]string, 0, 16)
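// split into packages that changed vs. packages that are new: only the changed ones are compared against the base project results below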
if !stagingConfig.RebuildAll {
for pkg, headOid := range headSubmodules {
if baseOid, exists := baseSubmodules[pkg]; !exists || baseOid != headOid {
modifiedOrNew = append(modifiedOrNew, pkg)
if len(baseOid) > 0 {
modifiedPackages = append(modifiedPackages, pkg)
} else {
newPackages = append(newPackages, pkg)
}
common.LogDebug(pkg, ":", baseOid, "->", headOid)
}
}
}
if len(modifiedOrNew) == 0 {
if len(modifiedPackages) == 0 && len(newPackages) == 0 {
rebuild_all := stagingConfig.RebuildAll
reviews, err := gitea.GetPullRequestReviews(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
@@ -844,13 +877,13 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
gitea.AddComment(pr, msg)
}
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedOrNew...)
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedPackages...)
if err != nil {
common.LogError("failed fetching ref project status for", stagingConfig.ObsProject, ":", err)
}
stagingResult, err := ObsClient.BuildStatus(stagingProject)
if err != nil {
common.LogError("failed fetching ref project status for", stagingProject, ":", err)
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
}
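// combine the staging build results with the reference project results into the overall build status for this PR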
buildStatus := ProcessBuildStatus(stagingResult, baseResult)

View File

@@ -22,7 +22,6 @@ import (
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
@@ -79,7 +78,7 @@ func ProjectStatusSummarySvg(project string) []byte {
return ret.Bytes()
}
func PackageStatusSummarySvg(status *common.PackageBuildStatus) []byte {
func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
if !ok {
buildStatus = common.ObsBuildStatusDetails["error"]
@@ -109,10 +108,8 @@ func main() {
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
obsHost := flag.String("obs-host", "https://api.opensuse.org", "OBS API endpoint for package status information")
obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
flag.BoolVar(&debug, "debug", false, "Enable debug logging")
RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
flag.Parse()
common.PanicOnError(common.RequireObsSecretToken())
@@ -146,25 +143,21 @@ func main() {
res.Header().Add("content-type", "image/svg+xml")
status := GetDetailedBuildStatus(prj, pkg, repo, arch)
res.Write(PackageStatusSummarySvg(status))
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
return
}
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
for _, status := range r.Status {
if status.Package == pkg {
res.Write(PackageStatusSummarySvg(status))
return
}
}
}
}
})
http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}/buildlog", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
data, err := obs.BuildLog(prj, pkg, repo, arch)
if err != nil {
res.WriteHeader(http.StatusInternalServerError)
common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
return
}
defer data.Close()
io.Copy(res, data)
})
go ProcessUpdates()

View File

@@ -1,3 +0,0 @@
package main

View File

@@ -1,8 +1,8 @@
package main
import (
"log"
"slices"
"strings"
"sync"
"time"
@@ -24,86 +24,14 @@ type StatusUpdateMsg struct {
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
if ret, found := CurrentStatus[project]; found {
statusMutex.RUnlock()
return ret
}
res, err := obs.BuildStatus(project)
statusMutex.RUnlock()
statusMutex.Lock()
defer statusMutex.Unlock()
if err != nil {
return res
}
CurrentStatus[project] = res
now := time.Now().Unix()
CurrentStatus[project].LastUpdate = now
for _, r := range res.Result {
r.LastUpdate = now
for _, p := range r.Status {
p.LastUpdate = now
}
slices.SortFunc(r.Status, packageSort)
}
slices.SortFunc(res.Result, repoSort)
return res
}
func updatePrjPackage(prjState *common.BuildResultList, pkg string, now int64, pkgState *common.BuildResultList) {
for prjState.
Result[0].Status[0].Package
}
func extractPackageBuildStatus(prjState *common.BuildResultList, pkg string) []*common.PackageBuildStatus {
}
func GetDetailedPackageBuildStatus(prj, pkg string) []*common.PackageBuildStatus {
statusMutex.RLock()
now := time.Now().Unix()
cachedPrj, found := CurrentStatus[prj]
if found {
statusMutex.Unlock()
if now-cachedPrj.LastUpdate < 60 {
return extractPackageBuildStatus(cachedPrj, pkg)
}
}
ret, err := obs.BuildStatus(prj, pkg)
if err != nil {
}
statusMutex.Lock()
defer statusMutex.Unlock()
updatePrjPackage(cachedPrj, pkg, now, ret)
return extractPackageBuildStatus(cachedPrj, pkg)
}
func GetDetailedBuildStatus(prj, pkg, repo, arch string) *common.PackageBuildStatus {
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
return nil
}
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
for _, status := range r.Status {
if status.Package == pkg {
return &status
}
}
}
}
return nil
}
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
defer statusMutex.RUnlock()
if ret, found := CurrentStatus[project]; found {
return ret
} else {
go WatchObsProject(obs, project)
return nil
}
}
func ProcessUpdates() {
@@ -125,3 +53,30 @@ func ProcessUpdates() {
}
}
}
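// WatchObsProject registers ObsProject in WatchedRepos (at most one watcher per project), then polls OBS build results and forwards every state change to StatusUpdateCh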
func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
old_state := ""
mutex.Lock()
if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
mutex.Unlock()
return
} else {
WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
mutex.Unlock()
}
LogDebug("+ watching", ObsProject)
opts := common.BuildResultOptions{}
for {
state, err := obs.BuildStatusWithState(ObsProject, &opts)
if err != nil {
log.Println(" *** Error fetching build for", ObsProject, err)
time.Sleep(time.Minute)
} else {
opts.OldState = state.State
LogDebug(" --> update", ObsProject, " => ", old_state)
StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
}
}
}

View File

@@ -526,7 +526,7 @@ func main() {
log.Fatal(err)
}
defs := &common.RabbitMQGiteaEventsProcessor{}
var defs common.ListenDefinitions
var err error
if len(*basePath) == 0 {
@@ -557,7 +557,7 @@ func main() {
}
log.Println("*** Reconfiguring ***")
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().UpdateTopics(defs)
defs.UpdateTopics()
}
}()
signal.Notify(signalChannel, syscall.SIGHUP)
@@ -573,17 +573,18 @@ func main() {
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
defs.GitAuthor = GitAuthor
defs.RabbitURL, err = url.Parse(*rabbitUrl)
if err != nil {
log.Panicf("cannot parse server URL. Err: %#v\n", err)
}
go consistencyCheckProcess()
log.Println("defs:", *defs)
log.Println("defs:", defs)
defs.Handlers = make(map[string]common.RequestProcessor)
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
log.Fatal(common.ProcessRabbitMQEvents(defs))
log.Fatal(defs.ProcessRabbitMQEvents())
}

View File

@@ -162,9 +162,9 @@ func main() {
checker := CreateDefaultStateChecker(*checkOnStart, req, Gitea, time.Duration(*checkIntervalHours)*time.Hour)
go checker.ConsistencyCheckProcess()
listenDefs := &common.RabbitMQGiteaEventsProcessor{
listenDefs := common.ListenDefinitions{
Orgs: orgs,
// GitAuthor: GitAuthor,
GitAuthor: GitAuthor,
Handlers: map[string]common.RequestProcessor{
common.RequestType_PR: req,
common.RequestType_PRSync: req,
@@ -172,7 +172,7 @@ func main() {
common.RequestType_PRReviewRejected: req,
},
}
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
listenDefs.RabbitURL, _ = url.Parse(*rabbitUrl)
common.PanicOnError(common.ProcessRabbitMQEvents(listenDefs))
common.PanicOnError(listenDefs.ProcessRabbitMQEvents())
}

View File

@@ -6,7 +6,6 @@ import (
"fmt"
"path"
"runtime/debug"
"slices"
"strings"
"github.com/opentracing/opentracing-go/log"
@@ -18,33 +17,6 @@ func prGitBranchNameForPR(repo string, prNo int) string {
return fmt.Sprintf("PR_%s#%d", repo, prNo)
}
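// PrjGitDescription builds the title and description for the forwarded PrjGit pull request from the PRs in the set, appending a note about the configured merge policy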
func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
title_refs := make([]string, 0, len(prset.PRs)-1)
refs := make([]string, 0, len(prset.PRs)-1)
for _, pr := range prset.PRs {
org, repo, idx := pr.PRComponents()
title_refs = append(title_refs, repo)
ref := fmt.Sprintf(common.PrPattern, org, repo, idx)
refs = append(refs, ref)
}
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ",\n")
if prset.Config.ManualMergeOnly {
desc = desc + "\n\nManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
}
if prset.Config.ManualMergeProject {
desc = desc + "\nManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
}
if !prset.Config.ManualMergeOnly && !prset.Config.ManualMergeProject {
desc = desc + "\nAutomatic merge enabled. This will merge when all review requirements are satisfied."
}
return
}
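// verifyRepositoryConfiguration checks that the repository has both AutodetectManualMerge and AllowManualMerge enabled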
func verifyRepositoryConfiguration(repo *models.Repository) error {
if repo.AutodetectManualMerge && repo.AllowManualMerge {
return nil
@@ -108,40 +80,41 @@ func AllocatePRProcessor(req *common.PullRequestWebhookEvent, configs common.Aut
}
common.LogDebug("git path:", git.GetPath())
// git.GitExecOrPanic("", "config", "set", "--global", "advice.submoduleMergeConflict", "false")
// git.GitExecOrPanic("", "config", "set", "--global", "advice.mergeConflict", "false")
return &PRProcessor{
config: config,
git: git,
}, nil
}
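// SetSubmodulesToMatchPRSet updates the PrjGit submodules to match the heads of the PRs in the set; the new signature also returns the repository names (title_refs) and PR references (refs) used to build the PrjGit PR title and description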
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string, []string, error) {
git := pr.git
subList, err := git.GitSubmoduleList(common.DefaultGitPrj, "HEAD")
if err != nil {
common.LogError("Error fetching submodule list for PrjGit", err)
return err
return nil, nil, err
}
refs := make([]string, 0, len(prset.PRs))
title_refs := make([]string, 0, len(prset.PRs))
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
org, repo, idx := pr.PRComponents()
org := pr.PR.Base.Repo.Owner.UserName
repo := pr.PR.Base.Repo.Name
idx := pr.PR.Index
prHead := pr.PR.Head.Sha
revert := false
if pr.PR.State != "open" {
prjGitPR, err := prset.GetPrjGitPR()
if prjGitPR != nil {
// remove PR from PrjGit
var valid bool
if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
common.LogError("Failed fetching original submodule commit id for repo")
return err
return nil, nil, err
}
}
revert = true
@@ -158,6 +131,9 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
if revert {
commitMsg = fmt.Sprintln("auto-created for", repo, "\n\nThis commit was autocreated by", GitAuthor, "removing\n", ref)
} else {
refs = append(refs, ref)
title_refs = append(title_refs, repo)
}
updateSubmoduleInPR(submodulePath, prHead, git)
@@ -176,7 +152,7 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
common.LogError("Failed to find expected repo:", repo)
}
}
return nil
return title_refs, refs, nil
}
func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet) error {
@@ -187,16 +163,17 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
common.LogError("Failed to fetch PrjGit repository data.", PrjGitOrg, PrjGitRepo, err)
return err
}
RemoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
remoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, RemoteName+"/"+PrjGitBranch)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, remoteName+"/"+PrjGitBranch)
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
if err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -206,9 +183,11 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
}
if !common.IsDryRun && headCommit != newHeadCommit {
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
title, desc := PrjGitDescription(prset)
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch,
"Forwarded PRs: "+strings.Join(title_refs, ", "),
fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor)+strings.Join(refs, ", "),
)
if err != nil {
common.LogError("Error creating PrjGit PR:", err)
return err
@@ -217,40 +196,12 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
RemoveDeadline: true,
})
prinfo := prset.AddPR(pr)
prinfo.RemoteName = RemoteName
prset.AddPR(pr)
}
return nil
}
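// RebaseAndSkipSubmoduleCommits rebases the PrjGit working branch onto the given remote branch, skipping commits whose only conflicts are submodule changes; any other conflict aborts the rebase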
func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch string) error {
git := pr.git
PrjGitPR, err := prset.GetPrjGitPR()
common.PanicOnError(err)
remoteBranch := PrjGitPR.RemoteName + "/" + branch
common.LogDebug("Rebasing on top of", remoteBranch)
for conflict := git.GitExec(common.DefaultGitPrj, "rebase", remoteBranch); conflict != nil; {
statuses, err := git.GitStatus(common.DefaultGitPrj)
if err != nil {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
common.PanicOnError(err)
}
for _, s := range statuses {
if s.SubmoduleChanges != "S..." {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
return fmt.Errorf("Unexpected conflict in rebase. %s", s)
}
}
conflict = git.GitExec(common.DefaultGitPrj, "rebase", "--skip")
}
return nil
}
func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
_, _, PrjGitBranch := prset.Config.GetPrjGit()
PrjGitPR, err := prset.GetPrjGitPR()
if err != nil {
common.LogError("Updating PrjGitPR but not found?", err)
@@ -261,23 +212,16 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
PrjGit := PrjGitPR.PR.Base.Repo
prjGitPRbranch := PrjGitPR.PR.Head.Name
PrjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
remoteName, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitBranch)
forcePush := false
// trust Gitea here on mergeability
if !PrjGitPR.PR.Mergeable {
common.PanicOnError(pr.RebaseAndSkipSubmoduleCommits(prset, PrjGitBranch))
forcePush = true
}
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
if err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -287,14 +231,12 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
}
if !common.IsDryRun && headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
// update PR
PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
PrjGitTitle := "Forwarded PRs: " + strings.Join(title_refs, ", ")
PrjGitBody := fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ", ")
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
@@ -340,7 +282,9 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
common.LogInfo("PR State is closed:", prjGitPR.PR.State)
for _, pr := range prset.PRs {
if pr.PR.State == "open" {
org, repo, idx := pr.PRComponents()
org := pr.PR.Base.Repo.Owner.UserName
repo := pr.PR.Base.Repo.Name
idx := pr.PR.Index
Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
State: "closed",
})
@@ -349,14 +293,6 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
return nil
}
if len(prset.PRs) > 1 {
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
}
}
if err = pr.UpdatePrjGitPR(prset); err != nil {
return err
}
@@ -375,10 +311,13 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
// make sure that prjgit is consistent and only submodules that are to be *updated*
// reset anything that changed that is not part of the prset
// package removals/additions are *not* counted here
org, repo, branch := config.GetPrjGit()
if pr, err := prset.GetPrjGitPR(); err == nil {
common.LogDebug("Submodule parse begin")
orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.RemoteName+"/"+branch) // merge base must be the remote branch, checked in prjgit update
remote, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, pr.PR.Base.Repo.CloneURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", remote, pr.PR.MergeBase, pr.PR.Head.Ref)
common.LogDebug("Fetch done")
orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.MergeBase)
common.PanicOnError(err)
new_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.Head.Sha)
common.PanicOnError(err)
@@ -415,6 +354,7 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
}
common.LogDebug(" num of reviewers:", len(prjGitPR.PR.RequestedReviewers))
org, repo, branch := config.GetPrjGit()
maintainers, err := common.FetchProjectMaintainershipData(Gitea, org, repo, branch)
if err != nil {
return err