forked from adamm/autogits

10 Commits

Author SHA256 Message Date
ec5ac4fca3 Don't fail on project git pull request creation. 2025-07-11 10:53:38 +02:00
d7132727a7 Create Pull Requests to specified branches
instead of always using DefaultBranch. This means that the target branch
always needs to be specified now.
2025-07-11 10:52:00 +02:00
74f40f536a message typo 2025-07-11 09:40:36 +02:00
cde46e85f3 Enable code stream publishing 2025-07-11 09:40:36 +02:00
4378568953 Fix logic in crash protection
We must not access the review.User object if it is nil
2025-07-11 09:40:36 +02:00
c286e12b67 Try to use Staging Master Project as default build target if available
This allows us to set custom build configuration or repository sets for
pull request projects.
2025-07-11 09:40:36 +02:00
cc9ad1703d Don't crash when new packages got added
The build result request of the base project is failing in this
situation, since the requested package does not exist.

Therefore we need to have separate lists for proper handling.
2025-07-11 09:40:36 +02:00
2f8b6b4ade Temporary hack to also include changed directories
Needs to be cleaned up via proper subdir handling
2025-07-11 09:40:36 +02:00
c11def6005 handle build results differently when requested with lastbuild=1
In that case we need to

 * ignore repo state as it is the current one. There is no last state
 * handle "unknown" state as finished, as the package was never attempted,
   but we don't know the reason (e.g. broken source or unresolvable)
2025-07-11 09:40:36 +02:00
0bee48472d Implementing cleanup of closed requests 2025-07-11 09:40:36 +02:00
35 changed files with 691 additions and 1800 deletions
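The crash-protection commit above (4378568953) is about not dereferencing review.User when it is nil. A minimal sketch of that guard, assuming the Gitea models package used elsewhere in this diff; the helper name and surrounding logic are illustrative, not the actual fix:

// reviewerName is a hypothetical helper showing the nil guard described in commit 4378568953.
func reviewerName(review *models.PullReview) string {
	if review == nil || review.User == nil {
		return "" // treat as "no reviewer" instead of panicking
	}
	return review.User.UserName
}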

View File

@@ -59,7 +59,6 @@ type AutogitConfig struct {
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
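For illustration, a config value exercising the fields above might look like the following sketch. The values are made up, and the common package qualifier is an assumption based on the import paths used elsewhere in this diff:

// Hypothetical example values for the AutogitConfig fields listed above.
cfg := common.AutogitConfig{
	Reviewers:          []string{"reviewer1"},      // only consulted by the `pr` workflow
	Committers:         []string{"factory-helper"}, // may order the bot around in addition to reviewers/maintainers
	ManualMergeOnly:    true,                       // merge only after an explicit "Merge OK" comment
	ManualMergeProject: true,                       // same requirement for ProjectGit PRs
}
_ = cfg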

View File

@@ -769,8 +769,6 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
LogDebug("Getting submodules for:", commitId)
go func() {
defer done.Unlock()
defer close(data_out.ch)
@@ -804,6 +802,7 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
for _, te := range tree.items {
if te.isTree() {
trees[p+te.name+"/"] = te.hash
submoduleList[p+te.name] = te.hash
} else if te.isSubmodule() {
submoduleList[p+te.name] = te.hash
}
@@ -918,16 +917,6 @@ type GitStatusData struct {
Path string
Status int
States [3]string
/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}
func parseGitStatusHexString(data io.ByteReader) (string, error) {
@@ -950,20 +939,6 @@ func parseGitStatusHexString(data io.ByteReader) (string, error) {
}
}
func parseGitStatusString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}
func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
@@ -1004,7 +979,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1014,11 +989,11 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGitStatusStringWithSpace(data)
ret.States[0], err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1028,7 +1003,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
@@ -1038,22 +1013,15 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 2); err != nil {
if err = skipGitStatusEntry(data, 7); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
@@ -1064,7 +1032,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGitStatusStringWithSpace(data)
ret.Path, err = parseGitStatusString(data)
if err != nil {
return nil, err
}
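The parse* helpers above consume NUL-delimited fields in the shape of `git status --porcelain=v2 -z` output. A small self-contained sketch that dumps that raw stream, just to illustrate the input format (assumes imports of bytes, fmt and os/exec; gitPath is a hypothetical repository path; rename entries also carry the original path as one extra NUL-separated field, which this simplified dump does not stitch back together):

// dumpStatusRecords prints the NUL-terminated records that the status parser
// above works through, one quoted record per line.
func dumpStatusRecords(gitPath string) error {
	out, err := exec.Command("git", "-C", gitPath, "status", "--porcelain=v2", "-z").Output()
	if err != nil {
		return err
	}
	for _, rec := range bytes.Split(out, []byte{0}) {
		if len(rec) > 0 {
			fmt.Printf("%q\n", rec) // e.g. "1 .M N... 100644 100644 100644 <hash> <hash> path"
		}
	}
	return nil
}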

View File

@@ -555,8 +555,6 @@ func TestGitStatusParse(t *testing.T) {
Path: ".gitmodules",
Status: GitStatus_Unmerged,
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
SubmoduleChanges: "N...",
},
},
},

View File

@@ -718,18 +718,20 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
}
func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
limit := int64(20)
page := int64(1)
resCount := 1
resCount := limit
retData := []*models.TimelineComment{}
for resCount > 0 {
for resCount == limit {
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
issue.NewIssueGetCommentsAndTimelineParams().
WithOwner(org).
WithRepo(repo).
WithIndex(idx).
WithPage(&page),
WithPage(&page).
WithLimit(&limit),
gitea.transport.DefaultAuthentication,
)
@@ -737,13 +739,11 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
return nil, err
}
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
resCount = int64(len(res.Payload))
page++
retData = append(retData, res.Payload...)
}
LogDebug("total results:", len(retData))
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))
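The change above makes the pagination loop request an explicit limit and stop once a page comes back shorter than it. The same pattern in isolation, with a generic fetch callback standing in for the IssueGetCommentsAndTimeline call (names here are illustrative, not the Gitea client API; assumes the models import):

// fetchAll pages through results until a short page signals the end.
func fetchAll(fetchPage func(page, limit int64) ([]*models.TimelineComment, error)) ([]*models.TimelineComment, error) {
	const limit int64 = 20
	var all []*models.TimelineComment
	for page := int64(1); ; page++ {
		items, err := fetchPage(page, limit)
		if err != nil {
			return nil, err
		}
		all = append(all, items...)
		if int64(len(items)) < limit { // short or empty page: nothing more to fetch
			return all, nil
		}
	}
}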

common/listen.go (new file, 324 lines)
View File

@@ -0,0 +1,324 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type ListenDefinitions struct {
RabbitURL *url.URL // amqps://user:password@host/queue
GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitMessage rabbitmq.Delivery
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
ch, err := connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer ch.Close()
if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.processTopicChanges(ch, q.Name)
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.connectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.processRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
ch := make(chan RabbitMessage, 100)
go l.connectAndProcessRabbitMQ(ch)
return ch
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range l.Orgs {
for requestType, _ := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}
LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
}
}
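A rough usage sketch for the ListenDefinitions API above, wiring a single pull_request handler. The handler type, organization and AMQP URL are illustrative, the rabbit credentials are assumed to be configured elsewhere as ProcessRabbitMQEvents expects, and imports of net/url and src.opensuse.org/autogits/common are assumed:

// prHandler is a hypothetical bot handler implementing common.RequestProcessor.
type prHandler struct{}

func (h *prHandler) ProcessFunc(req *common.Request) error {
	common.LogInfo("got event of type", req.Type)
	return nil
}

func runListener() error {
	u, err := url.Parse("amqps://rabbit.example.com/my-bot-queue") // illustrative URL
	if err != nil {
		return err
	}
	defs := common.ListenDefinitions{
		RabbitURL: u,
		GitAuthor: "my-bot",
		Orgs:      []string{"pool"},
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: &prHandler{},
		},
	}
	return defs.ProcessRabbitMQEvents()
}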

View File

@@ -50,13 +50,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
}
slices.Sort(test.topicDelta)
@@ -66,11 +64,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}
changes := []string{}
l.c.UpdateTopics(l)
l.UpdateTopics()
a:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -80,13 +78,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
l.Orgs = test.orgs2
l.c.UpdateTopics(l)
l.UpdateTopics()
changes = []string{}
b:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)

View File

@@ -1,56 +0,0 @@
package common
import (
"os"
"path"
"strings"
"gopkg.in/yaml.v3"
)
type Manifest struct {
Subdirectories []string
}
func (m *Manifest) SubdirForPackage(pkg string) string {
if m == nil {
return pkg
}
idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)
for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
}
}
if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
}
return pkg
}
func ReadManifestFile(filename string) (*Manifest, error) {
data, err := os.ReadFile(filename)
if err != nil {
return nil, err
}
return ParseManifestFile(data)
}
func ParseManifestFile(data []byte) (*Manifest, error) {
ret := &Manifest{}
err := yaml.Unmarshal(data, ret)
if err != nil {
return nil, err
}
return ret, nil
}

View File

@@ -1,56 +0,0 @@
package common_test
import (
"testing"
"src.opensuse.org/autogits/common"
)
func TestManifestSubdirAssignments(t *testing.T) {
tests := []struct {
Name string
ManifestContent string
Packages []string
ManifestLocations []string
}{
{
Name: "empty manifest",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
ManifestLocations: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
},
{
Name: "only few subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "b/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
},
}
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
m, err := common.ParseManifestFile([]byte(test.ManifestContent))
if err != nil {
t.Fatal(err)
}
for i, pkg := range test.Packages {
expected := test.ManifestLocations[i]
if l := m.SubdirForPackage(pkg); l != expected {
t.Error("Expected:", expected, "but got:", l)
}
}
})
}
}

View File

@@ -551,7 +551,6 @@ func (c *ObsClient) DeleteProject(project string) error {
query.Add("force", "1")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("DELETE", url.String(), nil)
if err != nil {
return err
}
@@ -562,21 +561,7 @@ func (c *ObsClient) DeleteProject(project string) error {
}
return nil
}
func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)
if err != nil {
return nil, err
}
return res.Body, nil
}
type PackageBuildStatus struct {

View File

@@ -25,13 +25,6 @@ type PRSet struct {
BotUser string
}
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
org = prinfo.PR.Base.Repo.Owner.UserName
repo = prinfo.PR.Base.Repo.Name
idx = prinfo.PR.Index
return
}
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
for _, p := range currentSet {
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
@@ -128,28 +121,27 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
}, nil
}
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
func (rs *PRSet) Contains(pr *models.PullRequest) bool {
for _, p := range rs.PRs {
if p.PR.Base.RepoID == pr.Base.RepoID &&
p.PR.Head.Sha == pr.Head.Sha &&
p.PR.Base.Name == pr.Base.Name {
return p, true
return true
}
}
return nil, false
return false
}
func (rs *PRSet) AddPR(pr *models.PullRequest) *PRInfo {
if pr, found := rs.Find(pr); found {
return pr
func (rs *PRSet) AddPR(pr *models.PullRequest) error {
if rs.Contains(pr) {
return nil
}
prinfo := &PRInfo{
rs.PRs = append(rs.PRs, &PRInfo{
PR: pr,
}
rs.PRs = append(rs.PRs, prinfo)
return prinfo
})
return nil
}
func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
@@ -180,15 +172,6 @@ func (rs *PRSet) GetPrjGitPR() (*PRInfo, error) {
return nil, PRSet_PrjGitMissing
}
func (rs *PRSet) NeedRecreatingPrjGit(currentBranchHash string) bool {
pr, err := rs.GetPrjGitPR()
if err != nil {
return true
}
return pr.PR.Base.Sha == currentBranchHash
}
func (rs *PRSet) IsConsistent() bool {
prjpr_info, err := rs.GetPrjGitPR()
if err != nil {
@@ -319,7 +302,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pr.Reviews = r
if !pr.Reviews.IsManualMergeOK() {
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
LogInfo("Not approved manual merge")
return false
}
}

View File

@@ -15,23 +15,7 @@ import (
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
/*
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}
t.Error(r)
}
*/
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
timeline := make([]*models.TimelineComment, len(reviews))
for idx, review := range reviews {

View File

@@ -1,238 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
type RabbitConnection struct {
RabbitURL *url.URL // amqps://user:password@host/queue
queueName string
ch *rabbitmq.Channel
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitProcessor interface {
GenerateTopics() []string
Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}
type RabbitMessage rabbitmq.Delivery
func (l *RabbitConnection) ProcessTopicChanges() {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer l.ch.Close()
if err = l.ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = l.ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = l.ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.ProcessTopicChanges()
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := l.ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.ConnectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.ProcessRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()
ch := make(chan RabbitMessage, 100)
go l.ConnectAndProcessRabbitMQ(ch)
return ch
}
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
}
}
}

View File

@@ -1,128 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"fmt"
"runtime/debug"
"slices"
"strings"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string
c *RabbitConnection
}
func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}
func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range gitea.Orgs {
for requestType, _ := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}
LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
return nil
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
return fmt.Errorf("Invalid routing key: %s", route)
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
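The interface-based variant on this side of the diff is driven in much the same way; a sketch of wiring RabbitMQGiteaEventsProcessor into the package-level ProcessRabbitMQEvents (handler type, organization and URL are illustrative, as before; assumes net/url and the common package import):

// eventsHandler is a hypothetical RequestProcessor for pull_request events.
type eventsHandler struct{}

func (h *eventsHandler) ProcessFunc(req *common.Request) error {
	common.LogInfo("processing", req.Type)
	return nil
}

func runGiteaListener() error {
	processor := &common.RabbitMQGiteaEventsProcessor{
		Orgs: []string{"pool"},
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: &eventsHandler{},
		},
	}
	u, err := url.Parse("amqps://rabbit.example.com/my-bot-queue") // illustrative
	if err != nil {
		return err
	}
	processor.Connection().RabbitURL = u
	return common.ProcessRabbitMQEvents(processor)
}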

View File

@@ -1,22 +0,0 @@
package common
type RabbitMQObsBuildStatusProcessor struct {
c *RabbitConnection
}
func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
return []string{}
}
func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}
return o.c
}
func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
return nil
}

View File

@@ -25,39 +25,26 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, r
return nil, err
}
reviews := make([]*models.PullReview, 0, len(reviewers))
reviews := make([]*models.PullReview, 0, 10)
var comments []*models.TimelineComment
alreadyHaveUserReview := func(user string) bool {
for _, r := range reviews {
if r.User != nil && r.User.UserName == user {
return true
}
}
return false
}
for idx, item := range timeline {
if item.Type == TimelineCommentType_Review {
for _, r := range rawReviews {
if r.ID == item.ReviewID {
if !alreadyHaveUserReview(r.User.UserName) {
reviews = append(reviews, r)
}
reviews = append(reviews, r)
break
}
}
} else if item.Type == TimelineCommentType_Comment {
comments = append(comments, item)
} else if item.Type == TimelineCommentType_PushPull {
LogDebug("cut-off", item.Created)
timeline = timeline[0:idx]
break
} else {
LogDebug("Unhandled timeline type:", item.Type)
}
}
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))
return &PRReviews{
reviews: reviews,
@@ -85,7 +72,6 @@ func (r *PRReviews) IsManualMergeOK() bool {
if c.Updated != c.Created {
continue
}
LogDebug("comment:", c.User.UserName, c.Body)
if slices.Contains(r.reviewers, c.User.UserName) {
if bodyCommandManualMergeOK(c.Body) {
return true

View File

@@ -113,10 +113,6 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}
func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {

View File

@@ -73,10 +73,6 @@ func runObsCommand(args ...string) ([]string, error) {
var DebugMode bool
func giteaPackage(pkg string) string {
return strings.ReplaceAll(pkg, "+", "_")
}
func projectMaintainer(obs *common.ObsClient, prj string) ([]string, []string) { // users, groups
meta, err := obs.GetProjectMeta(prj)
if err != nil {
@@ -190,16 +186,13 @@ func cloneDevel(git common.Git, gitDir, outName, urlString string) error {
}
func importRepos(packages []string) {
RepoToObsName := make(map[string]string)
factoryRepos := make([]*models.Repository, 0, len(packages)*2)
develProjectPackages := make([]string, 0, len(packages))
for _, pkg := range packages {
src_pkg_name := strings.Split(pkg, ":")
RepoToObsName[giteaPackage(src_pkg_name[0])] = src_pkg_name[0]
repo, err := client.Repository.RepoGet(
repository.NewRepoGetParams().
WithDefaults().WithOwner("pool").WithRepo(giteaPackage(src_pkg_name[0])),
WithDefaults().WithOwner("pool").WithRepo(src_pkg_name[0]),
r.DefaultAuthentication)
if err != nil {
@@ -226,7 +219,7 @@ func importRepos(packages []string) {
oldPackageNames := make([]string, 0, len(factoryRepos))
for _, repo := range factoryRepos {
oldPackageNames = append(oldPackageNames, RepoToObsName[repo.Name])
oldPackageNames = append(oldPackageNames, repo.Name)
}
// fork packages from pool
@@ -248,60 +241,44 @@ func importRepos(packages []string) {
log.Println("adding remotes...")
for i := 0; i < len(factoryRepos); i++ {
pkg := factoryRepos[i]
pkgName := RepoToObsName[pkg.Name]
gitName := pkg.Name
// verify that package was created by `git-importer`, or it's scmsync package and clone it
fi, err := os.Stat(filepath.Join(git.GetPath(), gitName))
fi, err := os.Stat(filepath.Join(git.GetPath(), pkg.Name))
if os.IsNotExist(err) {
if slices.Contains(develProjectPackages, pkgName) {
if slices.Contains(develProjectPackages, pkg.Name) {
// failed import of former factory package
log.Println("Failed to import former factory pkg:", pkgName)
continue
}
// scmsync?
devel_project, err := devel_projects.GetDevelProject(pkgName)
devel_project, err := devel_projects.GetDevelProject(pkg.Name)
if err != nil {
log.Panicln("devel project not found for", RepoToObsName[pkg.Name], "err:", err)
log.Panicln("devel project not found for", pkg.Name, "err:", err)
}
meta, _ := obs.GetPackageMeta(devel_project, pkgName)
meta, _ := obs.GetPackageMeta(devel_project, pkg.Name)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", gitName, meta.ScmSync); err != nil {
if err2 := cloneDevel(git, "", pkg.Name, meta.ScmSync); err != nil {
log.Panicln(err2)
}
if err2 := git.GitExec(gitName, "checkout", "-B", "main"); err2 != nil {
git.GitExecOrPanic(gitName, "checkout", "-B", "master")
}
git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main")
continue
}
// try again, should now exist
if fi, err = os.Stat(filepath.Join(git.GetPath(), gitName)); err != nil {
if fi, err = os.Stat(filepath.Join(git.GetPath(), pkg.Name)); err != nil {
log.Panicln(err)
}
} else if err != nil {
log.Panicln(err)
} else {
// verify that we do not have scmsync for imported packages
meta, err := obs.GetPackageMeta(prj, pkgName)
meta, err := obs.GetPackageMeta(prj, pkg.Name)
if err != nil {
log.Panicln(err)
}
if len(meta.ScmSync) > 0 {
u, err := url.Parse(meta.ScmSync)
if err != nil {
log.Println("Invlid scmsync in", pkg, meta.ScmSync, err)
}
o, err := url.Parse(strings.TrimSpace(git.GitExecWithOutputOrPanic(gitName, "remote", "get-url", "origin")))
log.Println("Invlid scmsync in git repo", pkg, meta.ScmSync, err)
if u.Host != o.Host || u.Path != o.Path {
log.Panicln("importing an scmsync package??:", prj, gitName)
} else {
log.Println("previous SCMSYNC package. Pull.")
git.GitExecOrPanic(gitName, "pull", "origin", "HEAD:main")
}
log.Panicln("importing an scmsync package??:", prj, pkg.Name)
}
}
@@ -310,11 +287,11 @@ func importRepos(packages []string) {
}
// add remote repos
out := git.GitExecWithOutputOrPanic(gitName, "remote", "show", "-n")
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show", "-n")
switch pkg.Owner.UserName {
case "pool":
if !slices.Contains(strings.Split(out, "\n"), "pool") {
out := git.GitExecWithOutputOrPanic(gitName, "remote", "add", "pool", pkg.CloneURL)
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "add", "pool", pkg.CloneURL)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
@@ -324,8 +301,7 @@ func importRepos(packages []string) {
}
}
for idx := 0; idx < len(oldPackageNames); idx++ {
pkgName := oldPackageNames[idx]
for _, pkgName := range oldPackageNames {
log.Println("fetching git:", pkgName)
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "remote", "show", "-n"), "\n")
@@ -356,7 +332,7 @@ func importRepos(packages []string) {
if forceBadPool {
log.Println(" *** factory has no branches!!! Treating as a devel package.")
develProjectPackages = append(develProjectPackages, pkgName)
continue
break
} else {
log.Panicln(" *** factory has no branches", branches)
}
@@ -414,34 +390,20 @@ func importRepos(packages []string) {
if !found {
log.Println("*** WARNING: Cannot find same tree for pkg", pkgName, "Will use current import instead")
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
log.Println("setting main to", "heads/"+import_branch)
}
} else {
log.Println("setting main to", "heads/"+import_branch)
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
}
}
for i := 0; i < len(develProjectPackages); i++ {
pkg := develProjectPackages[i]
log.Println("setting main branch for devel package:", pkg)
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Println("Error fetching pkg meta for:", prj, pkg, err)
meta, _ := obs.GetPackageMeta(prj, pkg)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
}
if meta == nil {
log.Println(" **** pkg meta is nil? ****")
} else if len(meta.ScmSync) > 0 {
if _, err := os.Stat(path.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
}
log.Println("skip for scmsync")
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
continue
} else {
common.PanicOnError(gitImporter(prj, pkg))
@@ -459,7 +421,6 @@ func importRepos(packages []string) {
log.Println(" *** pool branch 'devel' ahead. Switching branches.")
branch = "devel"
}
log.Println("setting main to", branch)
git.GitExecOrPanic(pkg, "checkout", "-B", "main", branch)
}
@@ -482,7 +443,6 @@ func importRepos(packages []string) {
})
for _, pkg := range factoryRepos {
log.Println("factory fork creator for develProjectPackage:", pkg.Name)
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg.Name), r.DefaultAuthentication); err != nil {
// update package
@@ -505,24 +465,16 @@ func importRepos(packages []string) {
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg.Name, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkgName, "fetch", "devel")
// git.GitExecOrPanic(pkg.Name, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg.Name, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg.Name, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg.Name, "push", "develorigin", "main", "-f")
branches := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "branch", "-r"), "\n")
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg.Name, "push", "develorigin", "--delete", b)
}
}
}
// git.GitExecOrPanic(pkg.ame, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(repo.Name)).WithBody(&models.EditRepoOption{
git.GitExec(pkg.Name, "push", "develorigin", "--delete", "factory", "devel")
// git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
HasPullRequests: true,
@@ -546,15 +498,13 @@ func importRepos(packages []string) {
}
for _, pkg := range develProjectPackages {
log.Println("repo creator for develProjectPackage:", pkg)
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
_, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &giteaPkg,
Name: &pkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
@@ -564,7 +514,7 @@ func importRepos(packages []string) {
log.Panicln("Error creating new package repository:", pkg, err)
}
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPkg).WithBody(
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
@@ -602,17 +552,9 @@ func importRepos(packages []string) {
git.GitExecOrPanic(pkg, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
branches := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "branch", "-r"), "\n")
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg, "push", "develorigin", "--delete", b)
}
}
}
git.GitExec(pkg, "push", "develorigin", "--delete", "factory", "devel")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.EditRepoOption{
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
}), r.DefaultAuthentication)
@@ -711,7 +653,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
missing := []string{}
uids := make([]common.PersonRepoMeta, len(orig_uids))
copy(uids, orig_uids)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
if err != nil {
if errors.Is(err, &repository.RepoListCollaboratorsNotFound{}) {
return missing
@@ -732,7 +674,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
log.Println("missing collabs for", pkg, ":", uids)
}
for _, u := range uids {
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.AddCollaboratorOption{
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(pkg).WithBody(&models.AddCollaboratorOption{
Permission: "write",
}).WithCollaborator(u.UserID), r.DefaultAuthentication)
@@ -867,14 +809,14 @@ func createPrjGit() {
if err != nil {
log.Panicln(err)
}
file.WriteString("{\n // Reference build project\n \"ObsProject\": \"" + prj + "\",\n}\n")
file.WriteString("{\n // Reference build project\n \"ObsProject\": \""+prj+"\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "staging.config")
if file, err = os.Create(path.Join(git.GetPath(), common.DefaultGitPrj, "workflow.config")); err != nil {
log.Panicln(err)
}
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \"" + org + "\",\n}\n")
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \""+org+"\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "workflow.config")
}
@@ -915,7 +857,6 @@ func main() {
syncMaintainers := flags.Bool("sync-maintainers-only", false, "Sync maintainers to Gitea and exit")
flags.BoolVar(&forceBadPool, "bad-pool", false, "Force packages if pool has no branches due to bad import")
flags.BoolVar(&forceNonPoolPackages, "non-pool", false, "Allow packages that are not in pool to be created. WARNING: Can't add to factory later!")
specificPackages := flags.String("packages", "", "Process specific package, separated by commas, ignoring the others")
if help := flags.Parse(os.Args[1:]); help == flag.ErrHelp || flags.NArg() != 2 {
printHelp(helpString.String())
@@ -1012,15 +953,11 @@ func main() {
if *purgeOnly {
log.Println("Purging repositories...")
for _, pkg := range packages {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
}
os.Exit(10)
}
if len(*specificPackages) != 0 {
importRepos(common.SplitStringNoEmpty(*specificPackages, ","))
return
}
importRepos(packages)
syncMaintainersToGitea(packages)
}

View File

@@ -1,46 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"io"
"os"
"github.com/tailscale/hujson"
)
type Config struct {
ForgeEndpoint string `json:"forge_url"`
Keys []string `json:"keys"`
}
type contextKey string
const configKey contextKey = "config"
func ReadConfig(reader io.Reader) (*Config, error) {
data, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("error reading config data: %w", err)
}
config := Config{}
data, err = hujson.Standardize(data)
if err != nil {
return nil, fmt.Errorf("failed to parse json: %w", err)
}
if err := json.Unmarshal(data, &config); err != nil {
return nil, fmt.Errorf("error parsing json to api keys and target url: %w", err)
}
return &config, nil
}
func ReadConfigFile(filename string) (*Config, error) {
file, err := os.Open(filename)
if err != nil {
return nil, fmt.Errorf("cannot open config file for reading. err: %w", err)
}
defer file.Close()
return ReadConfig(file)
}
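As a hedged illustration of the configuration this reader expects (key names come from the json tags above, the values are made up, and hujson permits the comment and trailing comma), in the same package and assuming a "strings" import:

// exampleConfig is a hypothetical status-proxy configuration.
const exampleConfig = `{
	// forge API root the proxy forwards statuses to
	"forge_url": "https://src.example.org/api/v1",
	"keys": ["client-token-1", "client-token-2"],
}`

func loadExampleConfig() (*Config, error) {
	return ReadConfig(strings.NewReader(exampleConfig))
}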

View File

@@ -1,15 +0,0 @@
package main
import (
"context"
"net/http"
)
func ConfigMiddleWare(cfg *Config) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), configKey, cfg)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
}

View File

@@ -1,169 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io"
"net/http"
"os"
"slices"
"strings"
"src.opensuse.org/autogits/common"
)
type Status struct {
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
type StatusInput struct {
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
func main() {
configFile := flag.String("config", "", "status proxy config file")
flag.Parse()
if *configFile == "" {
common.LogError("missing required argument config")
return
}
config, err := ReadConfigFile(*configFile)
if err != nil {
common.LogError("Failed to read config file", err)
return
}
mux := http.NewServeMux()
mux.Handle("/repos/{owner}/{repo}/statuses/{sha}", ConfigMiddleWare(config)(http.HandlerFunc(StatusProxy)))
common.LogInfo("server up and listening on :3000")
err = http.ListenAndServe(":3000", mux)
if err != nil {
common.LogError("Server failed to start up", err)
}
}
func StatusProxy(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodPost {
config, ok := r.Context().Value(configKey).(*Config)
if !ok {
common.LogError("Config missing from context")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
header := r.Header.Get("Authorization")
if header == "" {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token_arr := strings.Split(header, " ")
if len(token_arr) != 2 {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
if !strings.EqualFold(token_arr[0], "Bearer") {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token := token_arr[1]
if !slices.Contains(config.Keys, token) {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
owner := r.PathValue("owner")
repo := r.PathValue("repo")
sha := r.PathValue("sha")
if !ok {
common.LogError("Failed to get config from context, is it set?")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
}
posturl := fmt.Sprintf("%s/repos/%s/%s/statuses/%s", config.ForgeEndpoint, owner, repo, sha)
decoder := json.NewDecoder(r.Body)
var statusinput StatusInput
err := decoder.Decode(&statusinput)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
status := Status{
Context: "Build in obs",
State: statusinput.State,
TargetUrl: statusinput.TargetUrl,
}
status_payload, err := json.Marshal(status)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
client := &http.Client{}
req, err := http.NewRequest("POST", posturl, bytes.NewBuffer(status_payload))
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
ForgeToken := os.Getenv("GITEA_TOKEN")
if ForgeToken == "" {
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
common.LogError("GITEA_TOKEN was not set, all requests will fail")
return
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
resp, err := client.Do(req)
if err != nil {
common.LogError(fmt.Sprintf("Request to forge endpoint failed: %v", err))
http.Error(w, http.StatusText(http.StatusBadGateway), http.StatusBadGateway)
return
}
defer resp.Body.Close()
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(resp.StatusCode)
/*
the commented out section sets every key
value from the headers, unsure if this
leaks information from gitea
for k, v := range resp.Header {
for _, vv := range v {
w.Header().Add(k, vv)
}
}
*/
_, err = io.Copy(w, resp.Body)
if err != nil {
common.LogError("Error copying response body: %v", err)
}
} else {
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
return
}
}
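A minimal client-side sketch for the proxy above: it posts a build state for a commit to /repos/{owner}/{repo}/statuses/{sha} with one of the configured bearer keys. Host, token and state values are illustrative; assumes imports of bytes, encoding/json, fmt and net/http, in the same package as StatusInput:

// postStatus is a hypothetical client helper for the status proxy above.
func postStatus(host, token, owner, repo, sha, state, targetURL string) error {
	body, err := json.Marshal(StatusInput{State: state, TargetUrl: targetURL})
	if err != nil {
		return err
	}
	endpoint := fmt.Sprintf("http://%s/repos/%s/%s/statuses/%s", host, owner, repo, sha)
	req, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		return fmt.Errorf("proxy returned %s", resp.Status)
	}
	return nil
}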

go.mod (3 lines changed)
View File

@@ -16,8 +16,6 @@ require (
require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
@@ -30,7 +28,6 @@ require (
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/redis/go-redis/v9 v9.11.0 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect

go.sum (6 lines changed)
View File

@@ -1,12 +1,8 @@
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -54,8 +50,6 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=

View File

@@ -11,7 +11,6 @@ import (
"strconv"
"strings"
"time"
"unicode"
"src.opensuse.org/autogits/common"
"src.opensuse.org/autogits/common/gitea-generated/models"
@@ -22,10 +21,9 @@ var acceptRx *regexp.Regexp
var rejectRx *regexp.Regexp
var groupName string
func InitRegex(newGroupName string) {
groupName = newGroupName
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
rejectRx = regexp.MustCompile("^:\\s*")
func InitRegex(groupName string) {
acceptRx = regexp.MustCompile("\\s*:\\s*LGTM")
rejectRx = regexp.MustCompile("\\s*:\\s*")
}
func ParseReviewLine(reviewText string) (bool, string) {
@@ -36,18 +34,7 @@ func ParseReviewLine(reviewText string) (bool, string) {
return false, line
}
l := line[glen:]
for idx, r := range l {
if unicode.IsSpace(r) {
continue
} else if r == ':' {
return true, l[idx:]
} else {
return false, line
}
}
return false, line
return true, line[glen:]
}
func ReviewAccepted(reviewText string) bool {

View File

@@ -2,76 +2,6 @@ package main
import "testing"
func TestReviewApprovalCheck(t *testing.T) {
tests := []struct {
Name string
GroupName string
InString string
Approved bool
Rejected bool
}{
{
Name: "Empty String",
GroupName: "group",
InString: "",
},
{
Name: "Random Text",
GroupName: "group",
InString: "some things LGTM",
},
{
Name: "Group name with Random Text means disapproval",
GroupName: "group",
InString: "@group: some things LGTM",
Rejected: true,
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "LGTM approval",
GroupName: "group2",
InString: "@group2: LGTM",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approved",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approve",
Approved: true,
},
{
Name: "disapproval",
GroupName: "group2",
InString: "@group2: disapprove",
Rejected: true,
},
}
func TestReviews(t *testing.T) {
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
InitRegex(test.GroupName)
if r := ReviewAccepted(test.InString); r != test.Approved {
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
}
if r := ReviewRejected(test.InString); r != test.Rejected {
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
}
})
}
}

View File

@@ -263,7 +263,7 @@ func ProcessRepoBuildStatus(results, ref []common.PackageBuildStatus) (status Bu
return BuildStatusSummarySuccess, SomeSuccess
}
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string) (*common.ProjectMeta, error) {
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
common.LogDebug("repo content fetching ...")
err := FetchPrGit(git, pr)
if err != nil {
@@ -289,7 +289,15 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
}
meta, err := ObsClient.GetProjectMeta(buildPrj)
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
if err == nil {
// success, so we use that staging master project as our build project
buildPrj = stagingMasterPrj
} else {
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
meta, err = ObsClient.GetProjectMeta(buildPrj)
}
if err != nil {
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
return nil, err
@@ -414,7 +422,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
var state RequestModification = RequestModificationSourceChanged
if meta == nil {
// new build
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject)
common.LogDebug(" Staging master:", config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
if err != nil {
return RequestModificationNoChange, err
}
@@ -428,6 +437,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
} else {
err = ObsClient.SetProjectMeta(meta)
if err != nil {
x, _ := xml.MarshalIndent(meta, "", " ")
common.LogDebug(" meta:", string(x))
common.LogError("cannot create meta project:", err)
return RequestModificationNoChange, err
}
@@ -643,7 +654,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogError("No PR associated with review:", org, "/", repo, "#", id, "Error:", err)
return true, err
}
common.LogDebug("PR state:", pr.State)
if pr.State == "closed" {
// dismiss the review
@@ -660,40 +670,68 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
}
if review, err := FetchOurLatestActionableReview(gitea, org, repo, id); err == nil {
common.LogInfo("processing review", review.HTMLURL, "state", review.State)
// Fetching data
review, review_error := FetchOurLatestActionableReview(gitea, org, repo, id)
if pr.State != "closed" && review_error != nil {
// Nothing to do
return true, nil
}
err = FetchPrGit(git, pr)
if err != nil {
common.LogError("Cannot fetch PR git:", pr.URL)
return false, err
err = FetchPrGit(git, pr)
if err != nil {
common.LogError("Cannot fetch PR git:", pr.URL)
return false, err
}
// we want the possibly pending modification here, in case stagings are added, etc.
// job of the review team to deal with issues
common.LogDebug("QA configuration fetching ...", common.StagingConfigFile)
data, err := git.GitCatFile(pr.Head.Sha, pr.Head.Sha, common.StagingConfigFile)
if err != nil {
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
if !IsDryRun {
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
}
return true, err
}
// we want the possibly pending modification here, in case stagings are added, etc.
// job of the review team to deal with issues
common.LogDebug("QA configuration fetching ...", common.StagingConfigFile)
data, err := git.GitCatFile(pr.Head.Sha, pr.Head.Sha, common.StagingConfigFile)
if err != nil {
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
if !IsDryRun {
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
}
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
}
if stagingConfig.ObsProject == "" {
common.LogError("Cannot find reference project for PR#", pr.Index)
if !IsDryRun && review_error == nil {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
return true, err
}
return true, nil
}
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
}
common.LogDebug("ObsProject:", stagingConfig.ObsProject)
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, ObsClient.HomeProject, pr)
if stagingConfig.ObsProject == "" {
common.LogError("Cannot find reference project for PR#", pr.Index)
// Cleanup projects
if pr.State == "closed" {
// review is done, cleanup
common.LogInfo(" -- closed request, cleanup staging projects")
for _, setup := range stagingConfig.QA {
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find reference project")
return true, err
ObsClient.DeleteProject(stagingProject + ":" + setup.Name)
}
return true, nil
}
if stagingProject != "" {
if !IsDryRun {
ObsClient.DeleteProject(stagingProject)
}
}
return true, nil
}
// Process review aka setup projects
if review_error == nil {
common.LogInfo("processing review", review.HTMLURL, "state", review.State)
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
if err != nil {
@@ -727,16 +765,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
}
if stagingConfig.StagingProject != "" {
// staging project must either be nothing or be *under* the target project.
// other setups are currently not supported
// NOTE: this is user input, so we need some limits here
l := len(stagingConfig.ObsProject)
if l >= len(stagingConfig.StagingProject) || stagingConfig.ObsProject != stagingConfig.StagingProject[0:l] {
common.LogError("StagingProject (", stagingConfig.StagingProject, ") is not child of target project", stagingConfig.ObsProject)
}
}
if meta.Name != stagingConfig.ObsProject {
common.LogError("staging.config . ObsProject:", stagingConfig.ObsProject, " is not target project name", meta.Name)
if !IsDryRun {
@@ -757,17 +785,22 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogDebug(" # head submodules:", len(headSubmodules))
common.LogDebug(" # base submodules:", len(baseSubmodules))
modifiedOrNew := make([]string, 0, 16)
modifiedPackages := make([]string, 0, 16)
newPackages := make([]string, 0, 16)
if !stagingConfig.RebuildAll {
for pkg, headOid := range headSubmodules {
if baseOid, exists := baseSubmodules[pkg]; !exists || baseOid != headOid {
modifiedOrNew = append(modifiedOrNew, pkg)
if len(baseOid) > 0 {
modifiedPackages = append(modifiedPackages, pkg)
} else {
newPackages = append(newPackages, pkg)
}
common.LogDebug(pkg, ":", baseOid, "->", headOid)
}
}
}
if len(modifiedOrNew) == 0 {
if len(modifiedPackages) == 0 && len(newPackages) == 0 {
rebuild_all := false || stagingConfig.RebuildAll
reviews, err := gitea.GetPullRequestReviews(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
@@ -844,13 +877,13 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
gitea.AddComment(pr, msg)
}
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedOrNew...)
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedPackages...)
if err != nil {
common.LogError("failed fetching ref project status for", stagingConfig.ObsProject, ":", err)
}
stagingResult, err := ObsClient.BuildStatus(stagingProject)
if err != nil {
common.LogError("failed fetching ref project status for", stagingProject, ":", err)
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
}
buildStatus := ProcessBuildStatus(stagingResult, baseResult)

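The modifiedPackages/newPackages split introduced above only needs the two submodule maps: an entry present in both but pointing at different commits is a modified package, an entry present only in head is a new one. A self-contained sketch of that classification (map contents are made-up example data, not real package names):

package main

import "fmt"

func main() {
	headSubmodules := map[string]string{"pkgA": "c2", "pkgB": "c3", "pkgC": "c9"}
	baseSubmodules := map[string]string{"pkgA": "c1", "pkgB": "c3"}

	var modifiedPackages, newPackages []string
	for pkg, headOid := range headSubmodules {
		if baseOid, exists := baseSubmodules[pkg]; !exists || baseOid != headOid {
			if exists {
				// known in the base project, but the commit moved
				modifiedPackages = append(modifiedPackages, pkg)
			} else {
				// not in the base project at all
				newPackages = append(newPackages, pkg)
			}
		}
	}
	fmt.Println("modified:", modifiedPackages) // [pkgA]
	fmt.Println("new:", newPackages)           // [pkgC]
}
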
View File

@@ -1,2 +1 @@
obs-status-service
*.svg

View File

@@ -22,13 +22,8 @@ import (
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"slices"
"strings"
"time"
"src.opensuse.org/autogits/common"
)
@@ -38,18 +33,27 @@ const (
)
var obs *common.ObsClient
var debug bool
func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
list := common.BuildResultList{
Result: res,
func LogDebug(v ...any) {
if debug {
log.Println(v...)
}
pkgs := list.GetPackageList()
}
func ProjectStatusSummarySvg(project string) []byte {
res := GetCurrentStatus(project)
if res == nil {
return nil
}
pkgs := res.GetPackageList()
maxLen := 0
for _, p := range pkgs {
maxLen = max(maxLen, len(p))
}
width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
width := float32(len(res.Result))*1.5 + float32(maxLen)*0.8
height := 1.5*float32(maxLen) + 30
ret := bytes.Buffer{}
@@ -60,78 +64,21 @@ func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
ret.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
ret.WriteString(`<defs>
<g id="f"> <!-- failed -->
<rect width="8em" height="1.5em" fill="#800" />
<rect width="1em" height="1em" fill="#800" />
</g>
<g id="s"> <!--succeeded-->
<rect width="8em" height="1.5em" fill="#080" />
<rect width="1em" height="1em" fill="#080" />
</g>
<g id="buidling"> <!--building-->
<rect width="8em" height="1.5em" fill="#880" />
<rect width="1em" height="1em" fill="#880" />
</g>
</defs>`)
ret.WriteString(`<use href="#f" x="1em" y="2em"/>`)
ret.WriteString(`</svg>`)
return ret.Bytes()
}
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
if R != nil && S != nil {
switch S.Code {
case "succeeded", "failed", "building":
return "/buildlog/" + R.Project + "/" + S.Package + "/" + R.Repository + "/" + R.Arch
}
}
return ""
}
func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
// per repo, per arch status bins
repo_names := []string{}
package_names := []string{}
multibuild_prefix := pkg + ":"
for _, r := range res {
if pos, found := slices.BinarySearchFunc(repo_names, r.Repository, strings.Compare); !found {
repo_names = slices.Insert(repo_names, pos, r.Repository)
}
for _, p := range r.Status {
if p.Package == pkg || strings.HasPrefix(p.Package, multibuild_prefix) {
if pos, found := slices.BinarySearchFunc(package_names, p.Package, strings.Compare); !found {
package_names = slices.Insert(package_names, pos, p.Package)
}
}
}
}
ret := NewSvg()
for _, pkg = range package_names {
// if len(package_names) > 1 {
ret.WriteTitle(pkg)
// }
for _, name := range repo_names {
ret.WriteSubtitle(name)
// print all repo arches here and build results
for _, r := range res {
if r.Repository != name {
continue
}
for _, s := range r.Status {
if s.Package == pkg {
link := LinkToBuildlog(r, s)
ret.WritePackageStatus(link, r.Arch, s.Code, s.Details)
}
}
}
}
}
return ret.GenerateSvg()
}
func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus) []byte {
func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
if !ok {
buildStatus = common.ObsBuildStatusDetails["error"]
@@ -148,18 +95,12 @@ func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus)
}
}
buildlog := LinkToBuildlog(repo, status)
startTag := ""
endTag := ""
log.Println(status, " -> ", buildStatus)
if len(buildlog) > 0 {
startTag = "<a href=\"" + buildlog + "\">"
endTag = "</a>"
}
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">` +
`<rect width="100%" height="100%" fill="` + fillColor + `"/>` + startTag +
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>` + endTag + `</svg>`)
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="` + fillColor + `"/>
<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>
</svg>`)
}
func main() {
@@ -167,119 +108,59 @@ func main() {
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
obsUrl := flag.String("obs-url", "https://api.opensuse.org", "OBS API endpoint for package buildlog information")
debug := flag.Bool("debug", false, "Enable debug logging")
// RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
// Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
flag.BoolVar(&debug, "debug", false, "Enable debug logging")
flag.Parse()
if *debug {
common.SetLoggingLevel(common.LogLevelDebug)
}
// common.PanicOnError(common.RequireObsSecretToken())
common.PanicOnError(common.RequireObsSecretToken())
var err error
if obs, err = common.NewObsClient(*obsUrl); err != nil {
if obs, err = common.NewObsClient(*obsHost); err != nil {
log.Fatal(err)
}
if redisUrl := os.Getenv("REDIS"); len(redisUrl) > 0 {
RedisConnect(redisUrl)
} else {
common.LogError("REDIS needs to contains URL of the OBS Redis instance with login information")
return
}
go func() {
for {
if err := RescanRepositories(); err != nil {
common.LogError("Failed to rescan repositories.", err)
}
time.Sleep(time.Minute * 5)
}
}()
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
common.LogInfo(" request: GET /status/" + obsPrj)
http.HandleFunc("GET /{Project}", func(res http.ResponseWriter, req *http.Request) {
res.WriteHeader(http.StatusBadRequest)
})
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
http.HandleFunc("GET /{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
/*
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
status := FindAndUpdateProjectResults(obsPrj)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
status, _ := PackageBuildStatus(obsPrj, obsPkg)
svg := PackageStatusSummarySvg(status)
*/
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
//res.Header().Add("size", fmt.Sprint(len(svg)))
//res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
status := FindAndUpdateRepoResults(obsPrj, repo)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
common.LogInfo("GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch)
res.Header().Add("content-type", "image/svg+xml")
for _, r := range FindAndUpdateProjectResults(prj) {
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
return
}
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
if idx, found := slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res.Write(BuildStatusSvg(r, r.Status[idx]))
return
for _, status := range r.Status {
if status.Package == pkg {
res.Write(PackageStatusSummarySvg(status))
return
}
}
break
}
}
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
})
http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
res.Header().Add("location", "https://build.opensuse.org/package/live_build_log/"+prj+"/"+pkg+"/"+repo+"/"+arch)
res.WriteHeader(307)
return
// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
data, err := obs.BuildLog(prj, pkg, repo, arch)
if err != nil {
res.WriteHeader(http.StatusInternalServerError)
common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
return
}
defer data.Close()
io.Copy(res, data)
})
go ProcessUpdates()
if *disableTls {
log.Fatal(http.ListenAndServe(*listen, nil))

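The per-package badge returned by PackageStatusSummarySvg above is just a fixed-size rectangle with the status code centred in it. A stand-alone sketch that writes such a badge to disk (statusBadge, the colours and the output filename are illustrative assumptions, not the service's exact palette or API):

package main

import (
	"fmt"
	"os"
)

// statusBadge renders a minimal build-status badge, mirroring the SVG template above.
func statusBadge(code, fill, textColor string) []byte {
	return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="` + fill + `"/>
<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + code + `</text>
</svg>`)
}

func main() {
	if err := os.WriteFile("status.svg", statusBadge("succeeded", "#080", "#fff"), 0o644); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
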
View File

@@ -1,82 +0,0 @@
package main
import (
"os"
"testing"
"src.opensuse.org/autogits/common"
)
func TestStatusSvg(t *testing.T) {
os.WriteFile("teststatus.svg", BuildStatusSvg(nil, &common.PackageBuildStatus{
Package: "foo",
Code: "succeeded",
Details: "more success here",
}), 0o777)
data := []*common.BuildResult{
{
Project: "project:foo",
Repository: "repo1",
Arch: "x86_64",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "s390x",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "unresolveable",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "i586",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "blocked",
Details: "foo bar is why",
},
},
},
{
Project: "project:foo",
Repository: "TW",
Arch: "s390",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "excluded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
}
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
}

View File

@@ -1,214 +0,0 @@
package main
import (
"context"
"slices"
"strings"
"sync"
"time"
"github.com/redis/go-redis/v9"
"src.opensuse.org/autogits/common"
)
var RepoStatus []*common.BuildResult = []*common.BuildResult{}
var RepoStatusLock *sync.RWMutex = &sync.RWMutex{}
var redisClient *redis.Client
func RedisConnect(RedisUrl string) {
opts, err := redis.ParseURL(RedisUrl)
if err != nil {
panic(err)
}
redisClient = redis.NewClient(opts)
}
func UpdateResults(r *common.BuildResult) {
RepoStatusLock.Lock()
defer RepoStatusLock.Unlock()
key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
common.LogDebug(" + Updating", key)
data, err := redisClient.HGetAll(context.Background(), key).Result()
if err != nil {
common.LogError("Failed fetching build results for", key, err)
}
common.LogDebug(" + Update size", len(data))
reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
for _, pkg := range r.Status {
pkg.LastUpdate = reset_time
}
r.LastUpdate = time.Now()
for pkg, result := range data {
if strings.HasPrefix(result, "scheduled") {
// TODO: lookup where's building
result = "building"
}
var idx int
var found bool
var code string
var details string
if pos := strings.IndexByte(result, ':'); pos > -1 && pos < len(result) {
code = result[0:pos]
details = result[pos+1:]
} else {
code = result
details = ""
}
if idx, found = slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res := r.Status[idx]
res.LastUpdate = r.LastUpdate
res.Code = code
res.Details = details
} else {
r.Status = slices.Insert(r.Status, idx, &common.PackageBuildStatus{
Package: pkg,
Code: code,
Details: details,
LastUpdate: r.LastUpdate,
})
}
}
for idx := 0; idx < len(r.Status); {
if r.Status[idx].LastUpdate == reset_time {
r.Status = slices.Delete(r.Status, idx, idx+1)
} else {
idx++
}
}
}
func FindProjectResults(project string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindRepoResults(project, repo string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project, Repository: repo}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project && RepoStatus[idx].Repository == repo {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindAndUpdateProjectResults(project string) []*common.BuildResult {
res := FindProjectResults(project)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 seconds for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func FindAndUpdateRepoResults(project, repo string) []*common.BuildResult {
res := FindRepoResults(project, repo)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 seconds for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func RescanRepositories() error {
ctx := context.Background()
var cursor uint64
var err error
common.LogDebug("** starting rescanning ...")
RepoStatusLock.Lock()
for _, repo := range RepoStatus {
repo.Dirty = false
}
RepoStatusLock.Unlock()
var count int
for {
var data []string
data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
if err != nil {
return err
}
RepoStatusLock.Lock()
for _, repo := range data {
r := strings.Split(repo, "/")
if len(r) != 3 || len(r[0]) < 8 || r[0][0:7] != "result." {
continue
}
d := &common.BuildResult{
Project: r[0][7:],
Repository: r[1],
Arch: r[2],
}
if pos, found := slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
RepoStatus[pos].Dirty = true
} else {
d.Dirty = true
RepoStatus = slices.Insert(RepoStatus, pos, d)
count++
}
}
RepoStatusLock.Unlock()
if cursor == 0 {
break
}
}
common.LogDebug(" added a total", count, "repos")
count = 0
RepoStatusLock.Lock()
for i := 0; i < len(RepoStatus); {
if !RepoStatus[i].Dirty {
RepoStatus = slices.Delete(RepoStatus, i, i+1)
count++
} else {
i++
}
}
RepoStatusLock.Unlock()
common.LogDebug(" removed", count, "repos")
common.LogDebug(" total repos:", len(RepoStatus))
return nil
}

View File

@@ -0,0 +1,82 @@
package main
import (
"log"
"slices"
"sync"
"time"
"src.opensuse.org/autogits/common"
)
var WatchedRepos []string
var mutex sync.Mutex
var StatusUpdateCh chan StatusUpdateMsg = make(chan StatusUpdateMsg)
var statusMutex sync.RWMutex
var CurrentStatus map[string]*common.BuildResultList = make(map[string]*common.BuildResultList)
type StatusUpdateMsg struct {
ObsProject string
Result *common.BuildResultList
}
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
defer statusMutex.RUnlock()
if ret, found := CurrentStatus[project]; found {
return ret
} else {
go WatchObsProject(obs, project)
return nil
}
}
func ProcessUpdates() {
for {
msg := <-StatusUpdateCh
statusMutex.Lock()
CurrentStatus[msg.ObsProject] = msg.Result
drainedChannel:
for {
select {
case msg = <-StatusUpdateCh:
CurrentStatus[msg.ObsProject] = msg.Result
default:
statusMutex.Unlock()
break drainedChannel
}
}
}
}
func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
old_state := ""
mutex.Lock()
if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
mutex.Unlock()
return
} else {
WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
mutex.Unlock()
}
LogDebug("+ watching", ObsProject)
opts := common.BuildResultOptions{}
for {
state, err := obs.BuildStatusWithState(ObsProject, &opts)
if err != nil {
log.Println(" *** Error fetching build for", ObsProject, err)
time.Sleep(time.Minute)
} else {
opts.OldState = state.State
LogDebug(" --> update", ObsProject, " => ", old_state)
StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
}
}
}

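ProcessUpdates above holds the status lock across a drain loop so that a burst of updates collapses into one visible state change per project. A self-contained sketch of that pattern, with a buffered channel standing in for StatusUpdateCh and a plain map standing in for CurrentStatus (project name and payloads are made up):

package main

import (
	"fmt"
	"sync"
)

func main() {
	updates := make(chan string, 3)
	updates <- "state-1"
	updates <- "state-2"
	updates <- "state-3"

	var mu sync.RWMutex
	latest := map[string]string{}

	// take one message, then drain whatever is already queued before unlocking
	msg := <-updates
	mu.Lock()
	latest["some:project"] = msg
drained:
	for {
		select {
		case msg = <-updates:
			latest["some:project"] = msg
		default:
			mu.Unlock()
			break drained
		}
	}

	mu.RLock()
	fmt.Println(latest["some:project"]) // state-3
	mu.RUnlock()
}
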
View File

@@ -0,0 +1,34 @@
package main
import (
"testing"
"go.uber.org/mock/gomock"
"src.opensuse.org/autogits/common"
mock_common "src.opensuse.org/autogits/common/mock"
)
func TestWatchObsProject(t *testing.T) {
tests := []struct {
name string
res common.BuildResultList
}{
{
name: "two requests",
res: common.BuildResultList{
State: "success",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctl := gomock.NewController(t)
obs := mock_common.NewMockObsStatusFetcherWithState(ctl)
obs.EXPECT().BuildStatusWithState("test:foo", "").Return(&test.res, nil)
WatchObsProject(obs, "test:foo")
})
}
}

View File

@@ -1,119 +0,0 @@
package main
import (
"bytes"
"fmt"
"slices"
)
type SvgWriter struct {
ypos float64
header []byte
out bytes.Buffer
}
func NewSvg() *SvgWriter {
svg := &SvgWriter{}
svg.header = []byte(`<svg version="2.0" overflow="auto" width="40ex" height="`)
svg.out.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
return svg
}
func (svg *SvgWriter) WriteTitle(title string) {
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + title + "</text>")
svg.ypos += 2.5
}
func (svg *SvgWriter) WriteSubtitle(subtitle string) {
svg.out.WriteString(`<use href="#repotitle" y="` + fmt.Sprint(svg.ypos-2) + `em"/>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + subtitle + `</text>`)
svg.ypos += 2
}
func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
StatusToSVG := func(S string) string {
switch S {
case "succeeded":
return "s"
case "failed":
return "f"
case "broken", "scheduled":
return S
case "blocked":
return "b"
case "building":
return "build"
case "unresolvable":
return "u"
case "disabled":
return "d"
case "excluded":
return "e"
}
return "un"
}
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + arch + `</text>`)
svg.out.WriteString(`<g>`)
if len(loglink) > 0 {
svg.out.WriteString(`<a href="` + loglink + `">`)
}
svg.out.WriteString(`<use href="#` + StatusToSVG(status) + `" x="20ex" y="` + fmt.Sprint(svg.ypos-1.7) + `em"/>`)
if len(loglink) > 0 {
svg.out.WriteString(`</a>`)
}
if len(detail) > 0 {
svg.out.WriteString(`<title>` + fmt.Sprint(detail) + "</title>")
}
svg.out.WriteString("</g>\n")
svg.ypos += 2
}
func (svg *SvgWriter) GenerateSvg() []byte {
return slices.Concat(svg.header, []byte(fmt.Sprint(svg.ypos)), svg.out.Bytes(), []byte("</svg>"))
}

View File

@@ -526,7 +526,7 @@ func main() {
log.Fatal(err)
}
defs := &common.RabbitMQGiteaEventsProcessor{}
var defs common.ListenDefinitions
var err error
if len(*basePath) == 0 {
@@ -557,7 +557,7 @@ func main() {
}
log.Println("*** Reconfiguring ***")
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().UpdateTopics(defs)
defs.UpdateTopics()
}
}()
signal.Notify(signalChannel, syscall.SIGHUP)
@@ -573,17 +573,18 @@ func main() {
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
defs.GitAuthor = GitAuthor
defs.RabbitURL, err = url.Parse(*rabbitUrl)
if err != nil {
log.Panicf("cannot parse server URL. Err: %#v\n", err)
}
go consistencyCheckProcess()
log.Println("defs:", *defs)
log.Println("defs:", defs)
defs.Handlers = make(map[string]common.RequestProcessor)
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
log.Fatal(common.ProcessRabbitMQEvents(defs))
log.Fatal(defs.ProcessRabbitMQEvents())
}

View File

@@ -162,9 +162,9 @@ func main() {
checker := CreateDefaultStateChecker(*checkOnStart, req, Gitea, time.Duration(*checkIntervalHours)*time.Hour)
go checker.ConsistencyCheckProcess()
listenDefs := &common.RabbitMQGiteaEventsProcessor{
listenDefs := common.ListenDefinitions{
Orgs: orgs,
// GitAuthor: GitAuthor,
GitAuthor: GitAuthor,
Handlers: map[string]common.RequestProcessor{
common.RequestType_PR: req,
common.RequestType_PRSync: req,
@@ -172,7 +172,7 @@ func main() {
common.RequestType_PRReviewRejected: req,
},
}
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
listenDefs.RabbitURL, _ = url.Parse(*rabbitUrl)
common.PanicOnError(common.ProcessRabbitMQEvents(listenDefs))
common.PanicOnError(listenDefs.ProcessRabbitMQEvents())
}

View File

@@ -6,7 +6,6 @@ import (
"fmt"
"path"
"runtime/debug"
"slices"
"strings"
"github.com/opentracing/opentracing-go/log"
@@ -18,33 +17,6 @@ func prGitBranchNameForPR(repo string, prNo int) string {
return fmt.Sprintf("PR_%s#%d", repo, prNo)
}
func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
title_refs := make([]string, 0, len(prset.PRs)-1)
refs := make([]string, 0, len(prset.PRs)-1)
for _, pr := range prset.PRs {
org, repo, idx := pr.PRComponents()
title_refs = append(title_refs, repo)
ref := fmt.Sprintf(common.PrPattern, org, repo, idx)
refs = append(refs, ref)
}
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ",\n")
if prset.Config.ManualMergeOnly {
desc = desc + "\n\nManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
}
if prset.Config.ManualMergeProject {
desc = desc + "\nManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
}
if !prset.Config.ManualMergeOnly && !prset.Config.ManualMergeProject {
desc = desc + "\nAutomatic merge enabled. This will merge when all review requirements are satisfied."
}
return
}
func verifyRepositoryConfiguration(repo *models.Repository) error {
if repo.AutodetectManualMerge && repo.AllowManualMerge {
return nil
@@ -108,40 +80,41 @@ func AllocatePRProcessor(req *common.PullRequestWebhookEvent, configs common.Aut
}
common.LogDebug("git path:", git.GetPath())
// git.GitExecOrPanic("", "config", "set", "--global", "advice.submoduleMergeConflict", "false")
// git.GitExecOrPanic("", "config", "set", "--global", "advice.mergeConflict", "false")
return &PRProcessor{
config: config,
git: git,
}, nil
}
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string, []string, error) {
git := pr.git
subList, err := git.GitSubmoduleList(common.DefaultGitPrj, "HEAD")
if err != nil {
common.LogError("Error fetching submodule list for PrjGit", err)
return err
return nil, nil, err
}
refs := make([]string, 0, len(prset.PRs))
title_refs := make([]string, 0, len(prset.PRs))
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
org, repo, idx := pr.PRComponents()
org := pr.PR.Base.Repo.Owner.UserName
repo := pr.PR.Base.Repo.Name
idx := pr.PR.Index
prHead := pr.PR.Head.Sha
revert := false
if pr.PR.State != "open" {
prjGitPR, err := prset.GetPrjGitPR()
prjGitPR, err := prset.GetPrjGitPR()
if prjGitPR != nil {
// remove PR from PrjGit
var valid bool
if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
common.LogError("Failed fetching original submodule commit id for repo")
return err
return nil, nil, err
}
}
revert = true
@@ -158,6 +131,9 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
if revert {
commitMsg = fmt.Sprintln("auto-created for", repo, "\n\nThis commit was autocreated by", GitAuthor, "removing\n", ref)
} else {
refs = append(refs, ref)
title_refs = append(title_refs, repo)
}
updateSubmoduleInPR(submodulePath, prHead, git)
@@ -176,7 +152,7 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
common.LogError("Failed to find expected repo:", repo)
}
}
return nil
return title_refs, refs, nil
}
func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet) error {
@@ -187,16 +163,17 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
common.LogError("Failed to fetch PrjGit repository data.", PrjGitOrg, PrjGitRepo, err)
return err
}
RemoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
remoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, RemoteName+"/"+PrjGitBranch)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, remoteName+"/"+PrjGitBranch)
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
if err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -206,9 +183,11 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
}
if !common.IsDryRun && headCommit != newHeadCommit {
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
title, desc := PrjGitDescription(prset)
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch,
"Forwarded PRs: "+strings.Join(title_refs, ", "),
fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor)+strings.Join(refs, ", "),
)
if err != nil {
common.LogError("Error creating PrjGit PR:", err)
return err
@@ -217,40 +196,12 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
RemoveDeadline: true,
})
prinfo := prset.AddPR(pr)
prinfo.RemoteName = RemoteName
prset.AddPR(pr)
}
return nil
}
func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch string) error {
git := pr.git
PrjGitPR, err := prset.GetPrjGitPR()
common.PanicOnError(err)
remoteBranch := PrjGitPR.RemoteName + "/" + branch
common.LogDebug("Rebasing on top of", remoteBranch)
for conflict := git.GitExec(common.DefaultGitPrj, "rebase", remoteBranch); conflict != nil; {
statuses, err := git.GitStatus(common.DefaultGitPrj)
if err != nil {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
common.PanicOnError(err)
}
for _, s := range statuses {
if s.SubmoduleChanges != "S..." {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
return fmt.Errorf("Unexpected conflict in rebase. %s", s)
}
}
conflict = git.GitExec(common.DefaultGitPrj, "rebase", "--skip")
}
return nil
}
func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
_, _, PrjGitBranch := prset.Config.GetPrjGit()
PrjGitPR, err := prset.GetPrjGitPR()
if err != nil {
common.LogError("Updating PrjGitPR but not found?", err)
@@ -261,23 +212,16 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
PrjGit := PrjGitPR.PR.Base.Repo
prjGitPRbranch := PrjGitPR.PR.Head.Name
PrjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
remoteName, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitBranch)
forcePush := false
// trust Gitea here on mergeability
if !PrjGitPR.PR.Mergeable {
common.PanicOnError(pr.RebaseAndSkipSubmoduleCommits(prset, PrjGitBranch))
forcePush = true
}
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
if err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -287,14 +231,12 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
}
if !common.IsDryRun && headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
// update PR
PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
PrjGitTitle := "Forwarded PRs: " + strings.Join(title_refs, ", ")
PrjGitBody := fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ", ")
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
@@ -340,7 +282,9 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
common.LogInfo("PR State is closed:", prjGitPR.PR.State)
for _, pr := range prset.PRs {
if pr.PR.State == "open" {
org, repo, idx := pr.PRComponents()
org := pr.PR.Base.Repo.Owner.UserName
repo := pr.PR.Base.Repo.Name
idx := pr.PR.Index
Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
State: "closed",
})
@@ -349,14 +293,6 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
return nil
}
if len(prset.PRs) > 1 {
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
}
}
if err = pr.UpdatePrjGitPR(prset); err != nil {
return err
}
@@ -375,10 +311,13 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
// make sure that prjgit is consistent and that only submodules which are meant to be *updated* change
// reset anything that changed that is not part of the prset
// package removals/additions are *not* counted here
org, repo, branch := config.GetPrjGit()
if pr, err := prset.GetPrjGitPR(); err == nil {
common.LogDebug("Submodule parse begin")
orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.RemoteName+"/"+branch) // merge base must be the remote branch, checked in prjgit update
remote, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, pr.PR.Base.Repo.CloneURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", remote, pr.PR.MergeBase, pr.PR.Head.Ref)
common.LogDebug("Fetch done")
orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.MergeBase)
common.PanicOnError(err)
new_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.Head.Sha)
common.PanicOnError(err)
@@ -415,6 +354,7 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
}
common.LogDebug(" num of reviewers:", len(prjGitPR.PR.RequestedReviewers))
org, repo, branch := config.GetPrjGit()
maintainers, err := common.FetchProjectMaintainershipData(Gitea, org, repo, branch)
if err != nil {
return err