1 commit

Commit 17ec5c5ea2: prjgit manual merge check (2025-07-28 19:44:52 +02:00)
26 changed files with 1025 additions and 1565 deletions


@@ -59,7 +59,6 @@ type AutogitConfig struct {
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
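The two Manual* flags hold back automatic merging until an explicit "Merge OK" comment arrives from an allowed user. A minimal sketch of that gate, assuming a hypothetical Comment type and reviewer list (neither is part of this commit):

package sketch

import (
	"slices"
	"strings"
)

// Comment is a stand-in for a Gitea PR comment; only the fields the check needs.
type Comment struct {
	Author string
	Body   string
}

// manualMergeApproved reports whether one of the allowed users (project/package
// maintainers or reviewers) left a "Merge OK" comment, which is the condition
// ManualMergeOnly and ManualMergeProject wait for.
func manualMergeApproved(comments []Comment, allowed []string) bool {
	for _, c := range comments {
		if strings.TrimSpace(c.Body) == "Merge OK" && slices.Contains(allowed, c.Author) {
			return true
		}
	}
	return false
}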


@@ -274,17 +274,12 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
}
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--branch", "--hash", branchName)
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--hash", "--verify", "refs/heads/"+branchName)
if err != nil {
return "", fmt.Errorf("Can't find default branch: %s", branchName)
}
id = strings.TrimSpace(SplitLines(id)[0])
if len(id) < 10 {
return "", fmt.Errorf("Can't find branch: %s", branchName)
}
return id, nil
return strings.TrimSpace(id), nil
}
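The rewritten GitBranchHead asks git to verify one exact ref instead of prefix-matching, so the SplitLines call and length check are no longer needed. A self-contained sketch of the equivalent plumbing call, using os/exec directly rather than the GitExecWithOutput wrapper above:

package sketch

import (
	"os/exec"
	"strings"
)

// branchHead mirrors the new lookup: with --verify, a missing branch is a hard
// error instead of an empty or prefix-matched result, and exactly one hash is printed.
func branchHead(gitDir, branch string) (string, error) {
	out, err := exec.Command("git", "-C", gitDir, "show-ref", "--hash", "--verify", "refs/heads/"+branch).Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}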
func (e *GitHandlerImpl) GitRemoteHead(gitDir, remote, branchName string) (string, error) {
@@ -350,7 +345,6 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
"GIT_COMMITTER_NAME=" + e.GitCommiter,
"EMAIL=not@exist@src.opensuse.org",
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_LFS_SKIP_PUSH=1",
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
}
if len(ExtraGitParams) > 0 {


@@ -610,7 +610,11 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
}
if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(targetId).WithHead(srcId),
repository.NewRepoGetPullRequestByBaseHeadParams().
WithOwner(repo.Owner.UserName).
WithRepo(repo.Name).
WithBase(targetId).
WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil {
return pr.Payload, nil

common/listen.go (new file, 324 lines)

@@ -0,0 +1,324 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type ListenDefinitions struct {
RabbitURL *url.URL // amqps://user:password@host/queue
GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitMessage rabbitmq.Delivery
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
ch, err := connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer ch.Close()
if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.processTopicChanges(ch, q.Name)
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.connectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.processRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
ch := make(chan RabbitMessage, 100)
go l.connectAndProcessRabbitMQ(ch)
return ch
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range l.Orgs {
for requestType, _ := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
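UpdateTopics does a merge walk over the old and new sorted topic lists and pushes only the delta to the binding goroutine. A short trace with assumed topic names:

// Assumed current topics:     opensuse.src.oldorg.push.#   opensuse.src.pool.push.#
// Assumed regenerated topics: opensuse.src.neworg.push.#   opensuse.src.pool.push.#
// Sent on topicSubChanges, in order:
//   "+opensuse.src.neworg.push.#"   (bind the added org)
//   "-opensuse.src.oldorg.push.#"   (unbind the dropped org)
// The unchanged pool topic generates no message.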
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}
LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
}
}
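Taken together, the new file boils down to: fill in ListenDefinitions, register one RequestProcessor per event type, and call ProcessRabbitMQEvents. A minimal wiring sketch; the queue URL, org name and handler are placeholder assumptions, not values from this commit:

package sketch

import (
	"net/url"

	"src.opensuse.org/autogits/common"
)

// prHandler is a stand-in RequestProcessor for pull_request events.
type prHandler struct{}

func (prHandler) ProcessFunc(req *common.Request) error {
	// react to the event here
	return nil
}

func listenSketch() error {
	// rabbit.opensuse.org selects the "opensuse" scope, so generateTopics will
	// subscribe this listener to "opensuse.src.pool.pull_request.#".
	u, err := url.Parse("amqps://rabbit.opensuse.org/my-queue")
	if err != nil {
		return err
	}
	l := &common.ListenDefinitions{
		RabbitURL: u,
		Orgs:      []string{"pool"},
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: prHandler{},
		},
	}
	return l.ProcessRabbitMQEvents() // blocks; reconnects on connection errors
}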


@@ -50,13 +50,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
}
slices.Sort(test.topicDelta)
@@ -66,11 +64,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}
changes := []string{}
l.c.UpdateTopics(l)
l.UpdateTopics()
a:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -80,13 +78,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
l.Orgs = test.orgs2
l.c.UpdateTopics(l)
l.UpdateTopics()
changes = []string{}
b:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)


@@ -19,11 +19,13 @@ func (m *Manifest) SubdirForPackage(pkg string) string {
idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)
lowercasePkg := strings.ToLower(pkg)
for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
basename := strings.ToLower(strings.TrimSuffix(sub, "/"))
if idx := strings.LastIndex(basename, "/"); idx > 0 {
basename = basename[idx+1:]
}
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
@@ -31,7 +33,7 @@ func (m *Manifest) SubdirForPackage(pkg string) string {
}
if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
return path.Join(m.Subdirectories[idx], pkg)
}
return pkg
}
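For reference, the mapping this produces on the test data in the next hunk: the longest subdirectory basename that is a prefix of the lowercased package name wins, otherwise the package stays at the top level. A small sketch, assuming a Manifest can be built directly:

package sketch

import "src.opensuse.org/autogits/common"

func subdirExample() []string {
	m := &common.Manifest{Subdirectories: []string{"a", "b", "libs/boo/", "somedir/Node/"}}
	return []string{
		m.SubdirForPackage("NodeJS"), // "somedir/Node/NodeJS" ("node" prefixes "nodejs")
		m.SubdirForPackage("Boost"),  // "libs/boo/Boost"      ("boo" prefixes "boost")
		m.SubdirForPackage("Foobar"), // "Foobar"              (no subdirectory matches)
	}
}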


@@ -33,8 +33,8 @@ func TestManifestSubdirAssignments(t *testing.T) {
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS"},
},
}


@@ -562,57 +562,25 @@ func (c *ObsClient) DeleteProject(project string) error {
}
return nil
}
func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)
if err != nil {
return nil, err
}
return res.Body, nil
}
type PackageBuildStatus struct {
Package string `xml:"package,attr"`
Code string `xml:"code,attr"`
Details string `xml:"details"`
LastUpdate time.Time
}
func PackageBuildStatusComp(A, B *PackageBuildStatus) int {
return strings.Compare(A.Package, B.Package)
}
type BuildResult struct {
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []*PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
LastUpdate time.Time
}
func BuildResultComp(A, B *BuildResult) int {
if cmp := strings.Compare(A.Project, B.Project); cmp != 0 {
return cmp
}
if cmp := strings.Compare(A.Repository, B.Repository); cmp != 0 {
return cmp
}
return strings.Compare(A.Arch, B.Arch)
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
}
type Binary struct {
@@ -629,7 +597,7 @@ type BinaryList struct {
type BuildResultList struct {
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
Result []*BuildResult `xml:"result"`
Result []BuildResult `xml:"result"`
isLastBuild bool
}


@@ -1,238 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
type RabbitConnection struct {
RabbitURL *url.URL // amqps://user:password@host/queue
queueName string
ch *rabbitmq.Channel
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitProcessor interface {
GenerateTopics() []string
Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}
type RabbitMessage rabbitmq.Delivery
func (l *RabbitConnection) ProcessTopicChanges() {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer l.ch.Close()
if err = l.ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = l.ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = l.ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.ProcessTopicChanges()
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := l.ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.ConnectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.ProcessRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()
ch := make(chan RabbitMessage, 100)
go l.ConnectAndProcessRabbitMQ(ch)
return ch
}
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
}
}
}


@@ -1,128 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"fmt"
"runtime/debug"
"slices"
"strings"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string
c *RabbitConnection
}
func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}
func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range gitea.Orgs {
for requestType, _ := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}
LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
return nil
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
return fmt.Errorf("Invalid routing key: %s", route)
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}


@@ -1,22 +0,0 @@
package common
type RabbitMQObsBuildStatusProcessor struct {
c *RabbitConnection
}
func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
return []string{}
}
func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}
return o.c
}
func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
return nil
}


@@ -113,10 +113,6 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}
func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {


@@ -23,6 +23,7 @@ import (
"errors"
"flag"
"fmt"
"io/fs"
"log"
"net/url"
"os"
@@ -146,9 +147,7 @@ func listMaintainers(obs *common.ObsClient, prj string, pkgs []string) {
log.Panicln(err)
}
if user != nil {
contact_email = append(contact_email, fmt.Sprintf("%s <%s>", user.Name, user.Email))
}
contact_email = append(contact_email, fmt.Sprintf("%s <%s>", user.Name, user.Email))
}
log.Println(strings.Join(contact_email, ", "))
}
@@ -159,7 +158,6 @@ func gitImporter(prj, pkg string) error {
params = append(params, "-l", "debug")
}
params = append(params, pkg)
common.LogDebug("git-importer", params)
cmd := exec.Command("./git-importer", params...)
if idx := slices.IndexFunc(cmd.Env, func(val string) bool { return val[0:12] == "GITEA_TOKEN=" }); idx != -1 {
cmd.Env = slices.Delete(cmd.Env, idx, idx+1)
@@ -172,113 +170,346 @@ func gitImporter(prj, pkg string) error {
return nil
}
func cloneDevel(git common.Git, gitDir, outName, urlString, remote string, fatal bool) error {
if url, _ := url.Parse(urlString); url != nil {
url.Fragment = ""
urlString = url.String()
}
func cloneDevel(git common.Git, gitDir, outName, urlString string) error {
url, err := url.Parse(urlString)
// branch := url.Fragment
url.Fragment = ""
params := []string{"clone", "-o", remote}
params = append(params, urlString, outName)
params := []string{"clone"}
/* if len(branch) > 0 {
params = append(params, "-b", branch)
}
*/
params = append(params, url.String(), outName)
if fatal {
git.GitExecOrPanic(gitDir, params...)
} else {
git.GitExec(gitDir, params...)
if err != nil {
return fmt.Errorf("error parsing SSH URL. %w", err)
}
git.GitExecOrPanic(gitDir, params...)
return nil
}
func findMissingDevelBranch(git common.Git, pkg, project string) {
d, err := git.GitBranchHead(pkg, "devel")
if err != nil {
if _, err = git.GitBranchHead(pkg, "factory"); err != nil {
log.Println("factory is missing... so maybe repo is b0rked.")
return
}
func importRepos(packages []string) {
RepoToObsName := make(map[string]string)
hash := common.SplitLines(git.GitExecWithOutputOrPanic(pkg,
"log",
"factory",
"--all",
"--grep=build.opensuse.org/package/show/"+project+"/"+pkg,
"-1",
"--pretty=format:%H"))
if len(hash) > 0 {
log.Println(" devel @", hash[0])
git.GitExecOrPanic(pkg, "branch", "devel", hash[0])
factoryRepos := make([]*models.Repository, 0, len(packages)*2)
develProjectPackages := make([]string, 0, len(packages))
for _, pkg := range packages {
src_pkg_name := strings.Split(pkg, ":")
RepoToObsName[giteaPackage(src_pkg_name[0])] = src_pkg_name[0]
repo, err := client.Repository.RepoGet(
repository.NewRepoGetParams().
WithDefaults().WithOwner("pool").WithRepo(giteaPackage(src_pkg_name[0])),
r.DefaultAuthentication)
if err != nil {
if !errors.Is(err, &repository.RepoGetNotFound{}) {
log.Panicln(err)
}
log.Println("Cannot find src package:", src_pkg_name)
develProjectPackages = append(develProjectPackages, src_pkg_name[0])
} else {
git.GitExecOrPanic(pkg, "branch", "devel", "factory")
factoryRepos = append(factoryRepos, repo.Payload)
}
}
log.Println("Num repos found:", len(factoryRepos))
if len(develProjectPackages) > 0 {
log.Println("Num of repos that need to create:", len(develProjectPackages))
log.Println("Create the following packages in pool to continue:", strings.Join(develProjectPackages, " "))
if forceNonPoolPackages {
log.Println(" IGNORING and will create these as non-pool packages!")
} else {
os.Exit(1)
}
} else {
log.Println(" devel already exists?", d)
}
}
func importFactoryRepoAndCheckHistory(pkg string, meta *common.PackageMeta) (factoryRepo *models.Repository, retErr error) {
if repo, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithDefaults().WithOwner("pool").WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
if !errors.Is(err, &repository.RepoGetNotFound{}) {
oldPackageNames := make([]string, 0, len(factoryRepos))
for _, repo := range factoryRepos {
oldPackageNames = append(oldPackageNames, RepoToObsName[repo.Name])
}
// fork packages from pool
for i := 0; i < len(oldPackageNames); {
pkg := oldPackageNames[i]
log.Println(" + package:", pkg)
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
log.Println(" ** failed to import openSUSE:Factory", pkg)
log.Println(" ** falling back to devel project only")
develProjectPackages = append(develProjectPackages, pkg)
oldPackageNames = slices.Delete(oldPackageNames, i, i+1)
} else {
i++
}
}
log.Println("adding remotes...")
for i := 0; i < len(factoryRepos); i++ {
pkg := factoryRepos[i]
pkgName := RepoToObsName[pkg.Name]
gitName := pkg.Name
// verify that package was created by `git-importer`, or it's scmsync package and clone it
fi, err := os.Stat(filepath.Join(git.GetPath(), gitName))
if os.IsNotExist(err) {
if slices.Contains(develProjectPackages, pkgName) {
// failed import of former factory package
log.Println("Failed to import former factory pkg:", pkgName)
continue
}
// scmsync?
devel_project, err := devel_projects.GetDevelProject(pkgName)
if err != nil {
log.Panicln("devel project not found for", RepoToObsName[pkg.Name], "err:", err)
}
meta, _ := obs.GetPackageMeta(devel_project, pkgName)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", gitName, meta.ScmSync); err != nil {
log.Panicln(err2)
}
if err2 := git.GitExec(gitName, "checkout", "-B", "main"); err2 != nil {
git.GitExecOrPanic(gitName, "checkout", "-B", "master")
}
continue
}
// try again, should now exist
if fi, err = os.Stat(filepath.Join(git.GetPath(), gitName)); err != nil {
log.Panicln(err)
}
} else if err != nil {
log.Panicln(err)
} else {
// verify that we do not have scmsync for imported packages
meta, err := obs.GetPackageMeta(prj, pkgName)
if err != nil {
log.Panicln(err)
}
if len(meta.ScmSync) > 0 {
u, err := url.Parse(meta.ScmSync)
if err != nil {
log.Println("Invlid scmsync in", pkg, meta.ScmSync, err)
}
o, err := url.Parse(strings.TrimSpace(git.GitExecWithOutputOrPanic(gitName, "remote", "get-url", "origin")))
log.Println("Invlid scmsync in git repo", pkg, meta.ScmSync, err)
if u.Host != o.Host || u.Path != u.Path {
log.Panicln("importing an scmsync package??:", prj, gitName)
} else {
log.Println("previous SCMSYNC package. Pull.")
git.GitExecOrPanic(gitName, "pull", "origin", "HEAD:main")
}
}
}
log.Println("Cannot find src package:", pkg)
return nil, nil
} else {
factoryRepo = repo.Payload
CreatePoolFork(factoryRepo)
}
if !fi.IsDir() {
log.Panicln("Expected package file should be a directory. It's not.", fi)
}
if _, err := os.Stat(filepath.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
common.LogDebug("Cloning factory...")
cloneDevel(git, "", pkg, factoryRepo.CloneURL, "pool", true) // in case we have imported
} else if err != nil {
common.PanicOnError(err)
} else {
// we have already cloned it... so, fetch pool remote
common.LogDebug("Fetching pool, as already should have the remote")
if err = git.GitExec(pkg, "fetch", "pool"); err != nil {
common.LogError(err)
return factoryRepo, err
// add remote repos
out := git.GitExecWithOutputOrPanic(gitName, "remote", "show", "-n")
switch pkg.Owner.UserName {
case "pool":
if !slices.Contains(strings.Split(out, "\n"), "pool") {
out := git.GitExecWithOutputOrPanic(gitName, "remote", "add", "pool", pkg.CloneURL)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
}
default:
log.Panicln(pkg.Owner.UserName)
}
}
roots := 0
if _, err := git.GitRemoteHead(pkg, "pool", "devel"); err == nil {
factory_roots := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/factory", "--max-parents=0"))
devel_roots := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/devel", "--max-parents=0"))
roots = len(common.SplitLines(factory_roots))
if devel_roots != factory_roots || len(common.SplitLines(factory_roots)) != 1 {
roots = 10
for _, pkgName := range oldPackageNames {
log.Println("fetching git:", pkgName)
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "remote", "show", "-n"), "\n")
params := []string{"fetch", "--multiple"}
params = append(params, remotes...)
out, _ := git.GitExecWithOutput(pkgName, params...)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
if slices.Contains(remotes, "origin") {
log.Println(" --- scmsync already, so we are done")
continue
}
// check if devel is ahead or behind factory and use that as reference
import_branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "^factory", "devel"), "\n")) > 0 {
log.Println(" *** devel ahead. Swtiching branches.")
import_branch = "devel"
}
// check that nothing is broken with the update
if slices.Contains(remotes, "pool") {
// check which branch is ahead
branches, err := fs.ReadDir(os.DirFS(path.Join(git.GetPath(), pkgName, ".git/refs/remotes")), "pool")
if err != nil {
if forceBadPool {
log.Println(" *** factory has no branches!!! Treating as a devel package.")
develProjectPackages = append(develProjectPackages, pkgName)
break
} else {
log.Panicln(" *** factory has no branches", branches)
}
}
pool_branch := "factory"
has_factory_devel := false
has_factory_factory := false
for _, branch := range branches {
if branch.Name() == "factory" {
has_factory_factory = true
} else if branch.Name() == "devel" {
has_factory_devel = true
}
}
log.Println(branches)
if has_factory_devel && has_factory_factory {
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "^pool/factory", "pool/devel"), "\n")) > 0 {
log.Println(" *** pool branch devel ahead. Switching branches.")
pool_branch = "devel"
}
} else if has_factory_devel && !has_factory_factory {
pool_branch = "devel"
} else if !has_factory_devel && has_factory_factory {
} else {
log.Panicln("branches screwed up for pkg", pkgName, branches)
}
// find tree object in factory branch
tree := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "-1", "--format=%T", "--no-commit-header", "pool/"+pool_branch))
log.Println("tree", tree)
import_tree_commits := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "--format=%H %T", "--no-commit-header", import_branch), "\n")
found := false
for i := range import_tree_commits {
commit_tree := strings.Split(import_tree_commits[i], " ")
if len(commit_tree) != 2 {
log.Panicln("wrong format?", commit_tree)
}
if commit_tree[1] == tree {
found = true
cherry_picks := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "--no-merges", "--reverse", "--ancestry-path", commit_tree[0]+".."+import_branch), "\n")
log.Println("cherry picks", cherry_picks)
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "pool/"+pool_branch)
for _, pick := range cherry_picks {
git.GitExecOrPanic(pkgName, "cherry-pick", pick)
}
break
}
}
if !found {
log.Println("*** WARNING: Cannot find same tree for pkg", pkgName, "Will use current import instead")
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
}
} else {
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
}
} else if _, err := git.GitRemoteHead(pkg, "pool", "factory"); err == nil {
items := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/factory", "--max-parents=0"))
roots = len(common.SplitLines(items))
} else {
common.LogInfo("No factory import ...")
}
if roots != 1 {
common.LogError("Expected 1 root in factory, but found", roots)
common.LogError("Ignoring current import")
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), pkg)))
retErr = fmt.Errorf("Invalid factory repo -- treating as devel project only")
return
for i := 0; i < len(develProjectPackages); i++ {
pkg := develProjectPackages[i]
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Println("Error fetching pkg meta for:", prj, pkg, err)
}
}
if meta == nil {
log.Println(" **** pkg meta is nil? ****")
} else if len(meta.ScmSync) > 0 {
if _, err := os.Stat(path.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
}
continue
} else {
common.PanicOnError(gitImporter(prj, pkg))
if out, err := git.GitExecWithOutput(pkg, "show-ref", "--branches"); err != nil || len(common.SplitStringNoEmpty(out, "\n")) == 0 {
log.Println(" *** no branches in package. removing")
develProjectPackages = slices.Delete(develProjectPackages, i, i+1)
i--
continue
}
}
// mark newer branch as main
branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "rev-list", "^factory", "devel"), "\n")) > 0 {
log.Println(" *** pool branch 'devel' ahead. Switching branches.")
branch = "devel"
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main", branch)
}
devel_project, err := devel_projects.GetDevelProject(pkg)
common.LogDebug("Devel project:", devel_project, err)
if err == common.DevelProjectNotFound {
// assume it's this project, maybe removed from factory
devel_project = prj
}
common.LogDebug("finding missing branches in", pkg, devel_project)
findMissingDevelBranch(git, pkg, devel_project)
return
}
slices.SortFunc(factoryRepos, func(a, b *models.Repository) int {
if a.Name == b.Name {
orgOrderNo := func(org string) int {
switch org {
case "pool":
return 1
}
return 0 // current devel to clone
}
func SetRepoOptions(repo *models.Repository) {
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(
&models.EditRepoOption{
return orgOrderNo(a.Owner.UserName) - orgOrderNo(b.Owner.UserName)
}
return strings.Compare(a.Name, b.Name)
})
factoryRepos = slices.CompactFunc(factoryRepos, func(a, b *models.Repository) bool {
return a.Name == b.Name
})
for _, pkg := range factoryRepos {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg.Name), r.DefaultAuthentication); err != nil {
// update package
fork, err := client.Repository.CreateFork(repository.NewCreateForkParams().
WithOwner(pkg.Owner.UserName).
WithRepo(pkg.Name).
WithBody(&models.CreateForkOption{
Organization: org,
}), r.DefaultAuthentication)
if err != nil {
log.Panicln("Error while trying to create fork from", pkg.Owner.UserName, pkg.Name, "to", org, ":", err)
}
repo = fork.Payload
} else {
repo = repoData.Payload
}
// branchName := repo.DefaultBranch
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg.Name, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkgName, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg.Name, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg.Name, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg.Name, "push", "develorigin", "main", "-f")
git.GitExec(pkg.Name, "push", "develorigin", "--delete", "factory", "devel")
// git.GitExecOrPanic(pkg.ame, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(repo.Name)).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
HasPullRequests: true,
HasPackages: false,
HasReleases: false,
@@ -290,305 +521,82 @@ func SetRepoOptions(repo *models.Repository) {
AllowRebaseUpdate: false,
AllowManualMerge: true,
AutodetectManualMerge: true,
DefaultMergeStyle: "fast-forward-only",
AllowRebase: false,
DefaultAllowMaintainerEdit: true,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
if err != nil {
log.Panicln("Failed to adjust repository:", repo.Name, err)
}
}
func CreatePoolFork(factoryRepo *models.Repository) *models.Repository {
pkg := factoryRepo.Name
log.Println("factory fork creator for develProjectPackage:", pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
// update package
fork, err := client.Repository.CreateFork(repository.NewCreateForkParams().
WithOwner("pool").
WithRepo(factoryRepo.Name).
WithBody(&models.CreateForkOption{
Organization: org,
}), r.DefaultAuthentication)
if err != nil {
log.Panicln("Error while trying to create fork from 'pool'", pkg, "to", org, ":", err)
}
repo := fork.Payload
return repo
} else {
return repoData.Payload
}
}
func CreateDevelOnlyPackage(pkg string) *models.Repository {
log.Println("repo creator for develProjectPackage:", pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
repoData, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &giteaPkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
}), r.DefaultAuthentication)
if err != nil {
log.Panicln("Error creating new package repository:", pkg, err)
log.Panicln("Failed to set default branch for package fork:", repo.Owner.UserName, "/", repo.Name, err)
}
repo := repoData.Payload
return repo
} else {
return repoData.Payload
}
}
func PushRepository(factoryRepo, develRepo *models.Repository, pkg string) (repo *models.Repository) {
// branchName := repo.DefaultBranch
if factoryRepo != nil {
repo = CreatePoolFork(factoryRepo)
for _, pkg := range develProjectPackages {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
_, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &giteaPkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
/*
devel
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg, "push", "develorigin", "--delete", b)
}
}
}*/
//branches := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "branch", "-r"), "\n")
/* factory
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg.Name, "push", "develorigin", "--delete", b)
}
if err != nil {
log.Panicln("Error creating new package repository:", pkg, err)
}
}
*/
// git.GitExecOrPanic(pkg.ame, "checkout", "-B", "main", "devel/main")
} else {
repo = CreateDevelOnlyPackage(pkg)
}
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkgName, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg, "lfs", "push", "develorigin", "--all")
}
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPkg).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
HasReleases: false,
HasActions: false,
AllowMerge: true,
AllowRebaseMerge: false,
AllowSquash: false,
AllowFastForwardOnly: true,
AllowRebaseUpdate: false,
AllowManualMerge: true,
AutodetectManualMerge: true,
DefaultMergeStyle: "fast-forward-only",
AllowRebase: false,
DefaultAllowMaintainerEdit: true,
}),
r.DefaultAuthentication,
)
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
SetRepoOptions(repo)
git.GitExec(pkg, "push", "develorigin", "--delete", "factory")
git.GitExec(pkg, "push", "develorigin", "--delete", "devel")
git.GitExec(pkg, "push", "develorigin", "--delete", "leap-16.0")
return repo
}
func importDevelRepoAndCheckHistory(pkg string, meta *common.PackageMeta) *models.Repository {
repo := CreateDevelOnlyPackage(pkg)
if _, err := os.Stat(filepath.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
cloneDevel(git, "", pkg, repo.SSHURL, "develorigin", false) // in case we have imported
}
if CloneScmsync(pkg, meta) {
return repo
}
var p, dp string
factory_branch, fhe := git.GitRemoteHead(pkg, "develorigin", "factory")
if fhe == nil {
p = strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", factory_branch))
} else {
common.LogError(fhe)
}
devel_branch, dhe := git.GitRemoteHead(pkg, "develorigin", "devel")
if dhe != nil {
devel_project, err := devel_projects.GetDevelProject(pkg)
common.LogDebug("Devel project:", devel_project, err)
if err == common.DevelProjectNotFound {
// assume it's this project, maybe removed from factory
devel_project = prj
}
common.LogDebug("finding missing branches in", pkg, devel_project)
findMissingDevelBranch(git, pkg, devel_project)
devel_branch, dhe = git.GitBranchHead(pkg, "devel")
}
if dhe == nil {
dp = strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", devel_branch))
} else {
common.LogError(dhe)
}
// even if one parent for both, we need common ancestry, or we are comparing different things.
mb, mb_err := git.GitExecWithOutput(pkg, "merge-base", factory_branch, devel_branch)
mb = strings.TrimSpace(mb)
if p != "1" || dp != "1" || mb_err != nil || mb != factory_branch || mb != devel_branch {
common.LogInfo("Bad export found ... clearing", p, dp)
common.LogInfo(" merge branch:", mb, factory_branch, devel_branch, mb_err)
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), pkg)))
}
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
common.PanicOnError(gitImporter(prj, pkg))
}
if p := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", "factory")); p != "1" {
common.LogError("Failed to import package:", pkg)
common.PanicOnError(fmt.Errorf("Expecting 1 root in after devel import, but have %s", p))
}
if out, err := git.GitExecWithOutput(pkg, "show-ref", "--branches"); err != nil || len(common.SplitStringNoEmpty(out, "\n")) == 0 {
common.LogError(" *** no branches in package. removing")
return repo
}
return repo
}
func SetMainBranch(pkg string, meta *common.PackageMeta) {
// scmsync, follow that and don't care
common.LogDebug("Setting main branch...")
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if slices.Contains(remotes, "origin") {
u, err := url.Parse(meta.ScmSync)
common.PanicOnError(err)
if len(u.Fragment) == 0 {
u.Fragment = "HEAD"
}
if err := git.GitExec(pkg, "checkout", "-B", "main", u.Fragment); err != nil {
git.GitExecOrPanic(pkg, "checkout", "-B", "main", "origin/"+u.Fragment)
}
return
}
// check if we have factory
if _, err := git.GitBranchHead(pkg, "factory"); err != nil {
if len(git.GitExecWithOutputOrPanic(pkg, "show-ref", "pool/factory")) > 20 {
git.GitExecOrPanic(pkg, "branch", "factory", "pool/factory")
}
}
// mark newer branch as main
branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "rev-list", "^factory", "devel"), "\n")) > 0 {
branch = "devel"
}
common.LogInfo("setting main to", branch)
git.GitExecOrPanic(pkg, "checkout", "-B", "main", branch)
}
func ObsToRepoName(obspkg string) string {
return strings.ReplaceAll(obspkg, "+", "_")
}
func ImportSha1Sync(pkg string, url *url.URL) {
common.LogDebug("Converting SHA1", url.String())
branch := url.Fragment
url.Fragment = ""
p := path.Join(pkg, "sha1stuff")
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), p)))
git.GitExecOrPanic(pkg, "clone", "--mirror", url.String(), "sha1stuff")
git.GitExecOrPanic(p, "fetch", "origin", branch)
gitexport := exec.Command("/usr/bin/git", "fast-export", "--signed-tags=strip", "--tag-of-filtered-object=drop", "--all")
gitexport.Dir = path.Join(git.GetPath(), p)
gitexportData, err := gitexport.Output()
common.LogDebug("Got export data size:", len(gitexportData))
common.PanicOnError(err)
gitimport := exec.Command("/usr/bin/git", "fast-import", "--allow-unsafe-features")
gitimport.Dir = path.Join(git.GetPath(), pkg)
gitimport.Stdin = bytes.NewReader(gitexportData)
data, err := gitimport.CombinedOutput()
common.LogError(string(data))
common.PanicOnError(err)
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), p)))
}
func LfsImport(pkg string) {
git.GitExecOrPanic(pkg, "lfs", "migrate", "import", "--everything",
"--include=*.7z,*.bsp,*.bz2,*.gem,*.gz,*.jar,*.lz,*.lzma,*.obscpio,*.oxt,*.pdf,*.png,*.rpm,*.tar,*.tbz,*.tbz2,*.tgz,*.ttf,*.txz,*.whl,*.xz,*.zip,*.zst")
}
func CloneScmsync(pkg string, meta *common.PackageMeta) bool {
if len(meta.ScmSync) > 0 {
u, _ := url.Parse(meta.ScmSync)
if remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n"); !slices.Contains(remotes, "origin") {
branch := u.Fragment
if len(branch) == 0 {
branch = "HEAD"
if err != nil {
log.Panicln("Failed to adjust repository:", pkg, err)
}
u.Fragment = ""
git.GitExecOrPanic(pkg, "remote", "add", "origin", u.String())
u.Fragment = branch
}
if err := git.GitExec(pkg, "fetch", "origin"); err != nil && strings.Contains(err.Error(), "fatal: mismatched algorithms: client sha256; server sha1") {
ImportSha1Sync(pkg, u)
} else if err != nil {
panic(err)
repo = ret.Payload
} else {
repo = repoData.Payload
}
LfsImport(pkg)
return true
}
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg, "remote", "add", "develorigin", repo.SSHURL)
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
git.GitExec(pkg, "push", "develorigin", "--delete", "factory", "devel")
return false
}
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
}), r.DefaultAuthentication)
func importRepo(pkg string) (BrokenFactoryPackage, FailedImport bool) {
BrokenFactoryPackage = false
FailedImport = false
var develRepo, factoryRepo *models.Repository
src_pkg_name := strings.Split(pkg, ":")
pkg = src_pkg_name[0]
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Println("Error fetching pkg meta for:", prj, pkg, err)
log.Panicln("Failed to set default branch for package fork:", repo.Owner.UserName, "/", repo.Name, err)
}
}
if err != nil {
common.PanicOnError(err)
}
if meta == nil {
panic("package meta is nil...")
}
factoryRepo, err = importFactoryRepoAndCheckHistory(pkg, meta)
if factoryRepo != nil && err != nil {
BrokenFactoryPackage = true
}
if factoryRepo == nil && forceNonPoolPackages {
log.Println(" IGNORING and will create these as non-pool packages!")
}
if factoryRepo == nil || BrokenFactoryPackage {
develRepo = importDevelRepoAndCheckHistory(pkg, meta)
} else {
CloneScmsync(pkg, meta)
}
SetMainBranch(pkg, meta)
PushRepository(factoryRepo, develRepo, pkg)
return
}
func syncOrgTeams(groupName string, origTeam []common.PersonRepoMeta) []string {
@@ -731,11 +739,6 @@ func syncMaintainersToGitea(pkgs []string) {
missingDevs := []string{}
devs := []string{}
if len(prjMeta.ScmSync) > 0 {
common.LogInfo("Project already in Git. Maintainers must have been already synced. Skipping...")
return
}
for _, group := range prjMeta.Groups {
if group.GroupID == "factory-maintainers" {
log.Println("Ignoring factory-maintainers")
@@ -854,17 +857,6 @@ func createPrjGit() {
}
}
func TrimMultibuildPackages(packages []string) []string {
for i := 0; i < len(packages); {
if strings.Contains(packages[i], ":") {
packages = slices.Delete(packages, i, i+1)
} else {
i++
}
}
return packages
}
var client *apiclient.GiteaAPI
var r *transport.Runtime
var git common.Git
@@ -899,19 +891,13 @@ func main() {
syncMaintainers := flags.Bool("sync-maintainers-only", false, "Sync maintainers to Gitea and exit")
flags.BoolVar(&forceBadPool, "bad-pool", false, "Force packages if pool has no branches due to bad import")
flags.BoolVar(&forceNonPoolPackages, "non-pool", false, "Allow packages that are not in pool to be created. WARNING: Can't add to factory later!")
specificPackages := flags.String("packages", "", "Process specific package, separated by commas, ignoring the others")
resumeAt := flags.String("resume", "", "Resume import at given package")
syncPool := flags.Bool("sync-pool-only", false, "Force updates pool based on currently imported project")
specificPackage := flags.String("package", "", "Process specific package only, ignoring the others")
if help := flags.Parse(os.Args[1:]); help == flag.ErrHelp || flags.NArg() != 2 {
printHelp(helpString.String())
return
}
if DebugMode {
common.SetLoggingLevel(common.LogLevelDebug)
}
r = transport.New(*giteaHost, apiclient.DefaultBasePath, []string{"https"})
r.DefaultAuthentication = transport.BearerToken(common.GetGiteaToken())
// r.SetDebug(true)
@@ -946,7 +932,13 @@ func main() {
prj = flags.Arg(0)
org = flags.Arg(1)
packages, err := runObsCommand("ls", prj)
packages = TrimMultibuildPackages(packages)
for i := 0; i < len(packages); {
if strings.Contains(packages[i], ":") {
packages = slices.Delete(packages, i, i+1)
} else {
i++
}
}
git, err = gh.CreateGitHandler(org)
if err != nil {
@@ -955,27 +947,6 @@ func main() {
defer git.Close()
log.Println(" - working directory:" + git.GetPath())
if *syncPool {
factory_pkgs, err := runObsCommand("ls", "openSUSE:Factory")
common.PanicOnError(err)
common.LogInfo("Syncing pool only...")
factory_pkgs = TrimMultibuildPackages(factory_pkgs)
for _, pkg := range packages {
if !slices.Contains(factory_pkgs, pkg) {
continue
}
repo, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(ObsToRepoName(pkg)), r.DefaultAuthentication)
common.PanicOnError(err)
if !slices.Contains(common.SplitLines(git.GitExecWithOutputOrPanic(pkg, "remote")), "pool") {
git.GitExecOrPanic(pkg, "remote", "add", "pool", repo.Payload.SSHURL)
}
git.GitExecOrPanic(pkg, "fetch", "pool")
}
}
/*
for _, pkg := range packages {
if _, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
@@ -1016,42 +987,16 @@ func main() {
if *purgeOnly {
log.Println("Purging repositories...")
pkgs := packages
if len(*specificPackages) > 0 {
pkgs = common.SplitStringNoEmpty(*specificPackages, ",")
}
for _, pkg := range pkgs {
for _, pkg := range packages {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
}
os.Exit(10)
}
if len(*specificPackages) != 0 {
packages = common.SplitStringNoEmpty(*specificPackages, ",")
if len(*specificPackage) != 0 {
importRepos([]string{*specificPackage})
return
}
slices.Sort(packages)
BrokenOBSPackage := []string{}
FailedImport := []string{}
for _, pkg := range packages {
if len(*resumeAt) > 0 && strings.Compare(*resumeAt, pkg) > 0 {
common.LogDebug(pkg, "skipped due to resuming at", *resumeAt)
continue
}
b, f := importRepo(pkg)
if b {
BrokenOBSPackage = append(BrokenOBSPackage, pkg)
}
if f {
FailedImport = append(FailedImport, pkg)
}
}
importRepos(packages)
syncMaintainersToGitea(packages)
common.LogError("Have broken pool packages:", len(BrokenOBSPackage))
// common.LogError("Total pool packages:", len(factoryRepos))
common.LogError("Failed to import:", strings.Join(FailedImport, ","))
common.LogInfo("BROKEN Pool packages:", strings.Join(BrokenOBSPackage, "\n"))
}

go.mod (3 changed lines)

@@ -16,8 +16,6 @@ require (
require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
@@ -30,7 +28,6 @@ require (
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/redis/go-redis/v9 v9.11.0 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect

go.sum (6 changed lines)

@@ -1,12 +1,8 @@
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -54,8 +50,6 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=


@@ -1,2 +1 @@
obs-status-service
*.svg


@@ -22,13 +22,8 @@ import (
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"slices"
"strings"
"time"
"src.opensuse.org/autogits/common"
)
@@ -38,18 +33,27 @@ const (
)
var obs *common.ObsClient
var debug bool
func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
list := common.BuildResultList{
Result: res,
func LogDebug(v ...any) {
if debug {
log.Println(v...)
}
pkgs := list.GetPackageList()
}
func ProjectStatusSummarySvg(project string) []byte {
res := GetCurrentStatus(project)
if res == nil {
return nil
}
pkgs := res.GetPackageList()
maxLen := 0
for _, p := range pkgs {
maxLen = max(maxLen, len(p))
}
width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
width := float32(len(res.Result))*1.5 + float32(maxLen)*0.8
height := 1.5*float32(maxLen) + 30
ret := bytes.Buffer{}
@@ -60,78 +64,21 @@ func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
ret.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
ret.WriteString(`<defs>
<g id="f"> <!-- failed -->
<rect width="8em" height="1.5em" fill="#800" />
<rect width="1em" height="1em" fill="#800" />
</g>
<g id="s"> <!--succeeded-->
<rect width="8em" height="1.5em" fill="#080" />
<rect width="1em" height="1em" fill="#080" />
</g>
<g id="buidling"> <!--building-->
<rect width="8em" height="1.5em" fill="#880" />
<rect width="1em" height="1em" fill="#880" />
</g>
</defs>`)
ret.WriteString(`<use href="#f" x="1em" y="2em"/>`)
ret.WriteString(`</svg>`)
return ret.Bytes()
}
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
if R != nil && S != nil {
switch S.Code {
case "succeeded", "failed", "building":
return "/buildlog/" + R.Project + "/" + S.Package + "/" + R.Repository + "/" + R.Arch
}
}
return ""
}
func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
// per repo, per arch status bins
repo_names := []string{}
package_names := []string{}
multibuild_prefix := pkg + ":"
for _, r := range res {
if pos, found := slices.BinarySearchFunc(repo_names, r.Repository, strings.Compare); !found {
repo_names = slices.Insert(repo_names, pos, r.Repository)
}
for _, p := range r.Status {
if p.Package == pkg || strings.HasPrefix(p.Package, multibuild_prefix) {
if pos, found := slices.BinarySearchFunc(package_names, p.Package, strings.Compare); !found {
package_names = slices.Insert(package_names, pos, p.Package)
}
}
}
}
ret := NewSvg()
for _, pkg = range package_names {
// if len(package_names) > 1 {
ret.WriteTitle(pkg)
// }
for _, name := range repo_names {
ret.WriteSubtitle(name)
// print all repo arches here and build results
for _, r := range res {
if r.Repository != name {
continue
}
for _, s := range r.Status {
if s.Package == pkg {
link := LinkToBuildlog(r, s)
ret.WritePackageStatus(link, r.Arch, s.Code, s.Details)
}
}
}
}
}
return ret.GenerateSvg()
}
func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus) []byte {
func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
if !ok {
buildStatus = common.ObsBuildStatusDetails["error"]
@@ -148,18 +95,12 @@ func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus)
}
}
buildlog := LinkToBuildlog(repo, status)
startTag := ""
endTag := ""
log.Println(status, " -> ", buildStatus)
if len(buildlog) > 0 {
startTag = "<a href=\"" + buildlog + "\">"
endTag = "</a>"
}
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">` +
`<rect width="100%" height="100%" fill="` + fillColor + `"/>` + startTag +
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>` + endTag + `</svg>`)
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="` + fillColor + `"/>
<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>
</svg>`)
}
func main() {
@@ -167,119 +108,59 @@ func main() {
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
obsUrl := flag.String("obs-url", "https://api.opensuse.org", "OBS API endpoint for package buildlog information")
debug := flag.Bool("debug", false, "Enable debug logging")
// RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
// Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
flag.BoolVar(&debug, "debug", false, "Enable debug logging")
flag.Parse()
if *debug {
common.SetLoggingLevel(common.LogLevelDebug)
}
// common.PanicOnError(common.RequireObsSecretToken())
common.PanicOnError(common.RequireObsSecretToken())
var err error
if obs, err = common.NewObsClient(*obsUrl); err != nil {
if obs, err = common.NewObsClient(*obsHost); err != nil {
log.Fatal(err)
}
if redisUrl := os.Getenv("REDIS"); len(redisUrl) > 0 {
RedisConnect(redisUrl)
} else {
common.LogError("REDIS needs to contains URL of the OBS Redis instance with login information")
return
}
go func() {
for {
if err := RescanRepositories(); err != nil {
common.LogError("Failed to rescan repositories.", err)
}
time.Sleep(time.Minute * 5)
}
}()
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
common.LogInfo(" request: GET /status/" + obsPrj)
http.HandleFunc("GET /{Project}", func(res http.ResponseWriter, req *http.Request) {
res.WriteHeader(http.StatusBadRequest)
})
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
http.HandleFunc("GET /{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
/*
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
status := FindAndUpdateProjectResults(obsPrj)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
status, _ := PackageBuildStatus(obsPrj, obsPkg)
svg := PackageStatusSummarySvg(status)
*/
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
//res.Header().Add("size", fmt.Sprint(len(svg)))
//res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
status := FindAndUpdateRepoResults(obsPrj, repo)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
common.LogInfo("GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch)
res.Header().Add("content-type", "image/svg+xml")
for _, r := range FindAndUpdateProjectResults(prj) {
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
return
}
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
if idx, found := slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res.Write(BuildStatusSvg(r, r.Status[idx]))
return
for _, status := range r.Status {
if status.Package == pkg {
res.Write(PackageStatusSummarySvg(status))
return
}
}
break
}
}
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
})
http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
res.Header().Add("location", "https://build.opensuse.org/package/live_build_log/"+prj+"/"+pkg+"/"+repo+"/"+arch)
res.WriteHeader(307)
return
// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
data, err := obs.BuildLog(prj, pkg, repo, arch)
if err != nil {
res.WriteHeader(http.StatusInternalServerError)
common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
return
}
defer data.Close()
io.Copy(res, data)
})
go ProcessUpdates()
if *disableTls {
log.Fatal(http.ListenAndServe(*listen, nil))
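
Side note on the routing used throughout this file: the handlers rely on Go 1.22 method-and-wildcard ServeMux patterns and req.PathValue. A self-contained sketch (the path, payload, and listen address here are illustrative, not the service's real endpoints):

package main

import (
	"log"
	"net/http"
)

func main() {
	// "GET /{Project}/{Package}" captures both path segments as named values.
	http.HandleFunc("GET /{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
		prj := req.PathValue("Project")
		pkg := req.PathValue("Package")
		svg := `<svg xmlns="http://www.w3.org/2000/svg" width="12em" height="1.5em">` +
			`<text x="0" y="1.1em">` + prj + "/" + pkg + `</text></svg>`
		res.Header().Add("content-type", "image/svg+xml")
		res.Write([]byte(svg))
	})
	log.Fatal(http.ListenAndServe("[::1]:8080", nil))
}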


@@ -1,82 +0,0 @@
package main
import (
"os"
"testing"
"src.opensuse.org/autogits/common"
)
func TestStatusSvg(t *testing.T) {
os.WriteFile("teststatus.svg", BuildStatusSvg(nil, &common.PackageBuildStatus{
Package: "foo",
Code: "succeeded",
Details: "more success here",
}), 0o777)
data := []*common.BuildResult{
{
Project: "project:foo",
Repository: "repo1",
Arch: "x86_64",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "s390x",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "unresolveable",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "i586",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "blocked",
Details: "foo bar is why",
},
},
},
{
Project: "project:foo",
Repository: "TW",
Arch: "s390",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "excluded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
}
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
}


@@ -1,214 +0,0 @@
package main
import (
"context"
"slices"
"strings"
"sync"
"time"
"github.com/redis/go-redis/v9"
"src.opensuse.org/autogits/common"
)
var RepoStatus []*common.BuildResult = []*common.BuildResult{}
var RepoStatusLock *sync.RWMutex = &sync.RWMutex{}
var redisClient *redis.Client
func RedisConnect(RedisUrl string) {
opts, err := redis.ParseURL(RedisUrl)
if err != nil {
panic(err)
}
redisClient = redis.NewClient(opts)
}
func UpdateResults(r *common.BuildResult) {
RepoStatusLock.Lock()
defer RepoStatusLock.Unlock()
key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
common.LogDebug(" + Updating", key)
data, err := redisClient.HGetAll(context.Background(), key).Result()
if err != nil {
common.LogError("Failed fetching build results for", key, err)
}
common.LogDebug(" + Update size", len(data))
reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
for _, pkg := range r.Status {
pkg.LastUpdate = reset_time
}
r.LastUpdate = time.Now()
for pkg, result := range data {
if strings.HasPrefix(result, "scheduled") {
// TODO: lookup where's building
result = "building"
}
var idx int
var found bool
var code string
var details string
if pos := strings.IndexByte(result, ':'); pos > -1 && pos < len(result) {
code = result[0:pos]
details = result[pos+1:]
} else {
code = result
details = ""
}
if idx, found = slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res := r.Status[idx]
res.LastUpdate = r.LastUpdate
res.Code = code
res.Details = details
} else {
r.Status = slices.Insert(r.Status, idx, &common.PackageBuildStatus{
Package: pkg,
Code: code,
Details: details,
LastUpdate: r.LastUpdate,
})
}
}
for idx := 0; idx < len(r.Status); {
if r.Status[idx].LastUpdate == reset_time {
r.Status = slices.Delete(r.Status, idx, idx+1)
} else {
idx++
}
}
}
func FindProjectResults(project string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindRepoResults(project, repo string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project, Repository: repo}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project && RepoStatus[idx].Repository == repo {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindAndUpdateProjectResults(project string) []*common.BuildResult {
res := FindProjectResults(project)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 second for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func FindAndUpdateRepoResults(project, repo string) []*common.BuildResult {
res := FindRepoResults(project, repo)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 second for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func RescanRepositories() error {
ctx := context.Background()
var cursor uint64
var err error
common.LogDebug("** starting rescanning ...")
RepoStatusLock.Lock()
for _, repo := range RepoStatus {
repo.Dirty = false
}
RepoStatusLock.Unlock()
var count int
for {
var data []string
data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
if err != nil {
return err
}
RepoStatusLock.Lock()
for _, repo := range data {
r := strings.Split(repo, "/")
if len(r) != 3 || len(r[0]) < 8 || r[0][0:7] != "result." {
continue
}
d := &common.BuildResult{
Project: r[0][7:],
Repository: r[1],
Arch: r[2],
}
if pos, found := slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
RepoStatus[pos].Dirty = true
} else {
d.Dirty = true
RepoStatus = slices.Insert(RepoStatus, pos, d)
count++
}
}
RepoStatusLock.Unlock()
if cursor == 0 {
break
}
}
common.LogDebug(" added a total", count, "repos")
count = 0
RepoStatusLock.Lock()
for i := 0; i < len(RepoStatus); {
if !RepoStatus[i].Dirty {
RepoStatus = slices.Delete(RepoStatus, i, i+1)
count++
} else {
i++
}
}
RepoStatusLock.Unlock()
common.LogDebug(" removed", count, "repos")
common.LogDebug(" total repos:", len(RepoStatus))
return nil
}
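
This (now removed) file, like the importer above, keeps its slices sorted with slices.BinarySearchFunc plus slices.Insert so later lookups stay logarithmic. A stand-alone illustration of that pattern, with made-up repository names:

package main

import (
	"fmt"
	"slices"
	"strings"
)

func main() {
	repos := []string{}
	for _, r := range []string{"standard", "images", "standard", "containers"} {
		// BinarySearchFunc returns the insertion point; insert only when missing,
		// so the slice stays sorted and free of duplicates.
		if pos, found := slices.BinarySearchFunc(repos, r, strings.Compare); !found {
			repos = slices.Insert(repos, pos, r)
		}
	}
	fmt.Println(repos) // [containers images standard]
}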


@@ -0,0 +1,82 @@
package main
import (
"log"
"slices"
"sync"
"time"
"src.opensuse.org/autogits/common"
)
var WatchedRepos []string
var mutex sync.Mutex
var StatusUpdateCh chan StatusUpdateMsg = make(chan StatusUpdateMsg)
var statusMutex sync.RWMutex
var CurrentStatus map[string]*common.BuildResultList = make(map[string]*common.BuildResultList)
type StatusUpdateMsg struct {
ObsProject string
Result *common.BuildResultList
}
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
defer statusMutex.RUnlock()
if ret, found := CurrentStatus[project]; found {
return ret
} else {
go WatchObsProject(obs, project)
return nil
}
}
func ProcessUpdates() {
for {
msg := <-StatusUpdateCh
statusMutex.Lock()
CurrentStatus[msg.ObsProject] = msg.Result
drainedChannel:
for {
select {
case msg = <-StatusUpdateCh:
CurrentStatus[msg.ObsProject] = msg.Result
default:
statusMutex.Unlock()
break drainedChannel
}
}
}
}
func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
old_state := ""
mutex.Lock()
if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
mutex.Unlock()
return
} else {
WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
mutex.Unlock()
}
LogDebug("+ watching", ObsProject)
opts := common.BuildResultOptions{}
for {
state, err := obs.BuildStatusWithState(ObsProject, &opts)
if err != nil {
log.Println(" *** Error fetching build for", ObsProject, err)
time.Sleep(time.Minute)
} else {
opts.OldState = state.State
LogDebug(" --> update", ObsProject, " => ", old_state)
StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
}
}
}
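
ProcessUpdates above takes one blocking receive, then drains any further queued updates via select/default and a labeled break, so the write lock is taken only once per burst. The drain pattern in isolation (channel contents are made up):

package main

import "fmt"

func main() {
	ch := make(chan int, 8)
	for i := 0; i < 5; i++ {
		ch <- i
	}
	latest := <-ch // blocking receive of the first update
drained:
	for {
		select {
		case latest = <-ch: // newer update queued: keep only the latest
		default:
			break drained // channel empty: stop draining
		}
	}
	fmt.Println("latest update:", latest) // 4
}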


@@ -0,0 +1,34 @@
package main
import (
"testing"
"go.uber.org/mock/gomock"
"src.opensuse.org/autogits/common"
mock_common "src.opensuse.org/autogits/common/mock"
)
func TestWatchObsProject(t *testing.T) {
tests := []struct {
name string
res common.BuildResultList
}{
{
name: "two requests",
res: common.BuildResultList{
State: "success",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctl := gomock.NewController(t)
obs := mock_common.NewMockObsStatusFetcherWithState(ctl)
obs.EXPECT().BuildStatusWithState("test:foo", "").Return(&test.res, nil)
WatchObsProject(obs, "test:foo")
})
}
}


@@ -1,119 +0,0 @@
package main
import (
"bytes"
"fmt"
"slices"
)
type SvgWriter struct {
ypos float64
header []byte
out bytes.Buffer
}
func NewSvg() *SvgWriter {
svg := &SvgWriter{}
svg.header = []byte(`<svg version="2.0" overflow="auto" width="40ex" height="`)
svg.out.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
return svg
}
func (svg *SvgWriter) WriteTitle(title string) {
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + title + "</text>")
svg.ypos += 2.5
}
func (svg *SvgWriter) WriteSubtitle(subtitle string) {
svg.out.WriteString(`<use href="#repotitle" y="` + fmt.Sprint(svg.ypos-2) + `em"/>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + subtitle + `</text>`)
svg.ypos += 2
}
func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
StatusToSVG := func(S string) string {
switch S {
case "succeeded":
return "s"
case "failed":
return "f"
case "broken", "scheduled":
return S
case "blocked":
return "b"
case "building":
return "build"
case "unresolvable":
return "u"
case "disabled":
return "d"
case "excluded":
return "e"
}
return "un"
}
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + arch + `</text>`)
svg.out.WriteString(`<g>`)
if len(loglink) > 0 {
svg.out.WriteString(`<a href="` + loglink + `">`)
}
svg.out.WriteString(`<use href="#` + StatusToSVG(status) + `" x="20ex" y="` + fmt.Sprint(svg.ypos-1.7) + `em"/>`)
if len(loglink) > 0 {
svg.out.WriteString(`</a>`)
}
if len(detail) > 0 {
svg.out.WriteString(`<title>` + fmt.Sprint(detail) + "</title>")
}
svg.out.WriteString("</g>\n")
svg.ypos += 2
}
func (svg *SvgWriter) GenerateSvg() []byte {
return slices.Concat(svg.header, []byte(fmt.Sprint(svg.ypos)), svg.out.Bytes(), []byte("</svg>"))
}
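
The removed SvgWriter buffered body elements while tracking a running y-position and only spliced the final height into the header in GenerateSvg. A compact sketch of that deferred-height idea (row labels are illustrative):

package main

import (
	"bytes"
	"fmt"
	"os"
)

func main() {
	var body bytes.Buffer
	ypos := 2.0
	for _, arch := range []string{"x86_64", "aarch64", "s390x"} {
		// Append one row per entry and advance the running y-position.
		fmt.Fprintf(&body, `<text x="1ex" y="%gem">%s</text>`, ypos-0.6, arch)
		ypos += 2
	}
	// The total height is only known after all rows were written.
	svg := fmt.Sprintf(`<svg xmlns="http://www.w3.org/2000/svg" width="40ex" height="%gem">%s</svg>`,
		ypos, body.String())
	os.Stdout.WriteString(svg + "\n")
}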


@@ -526,7 +526,7 @@ func main() {
log.Fatal(err)
}
defs := &common.RabbitMQGiteaEventsProcessor{}
var defs common.ListenDefinitions
var err error
if len(*basePath) == 0 {
@@ -557,7 +557,7 @@ func main() {
}
log.Println("*** Reconfiguring ***")
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().UpdateTopics(defs)
defs.UpdateTopics()
}
}()
signal.Notify(signalChannel, syscall.SIGHUP)
@@ -573,17 +573,18 @@ func main() {
updateConfiguration(*configFilename, &defs.Orgs)
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
defs.GitAuthor = GitAuthor
defs.RabbitURL, err = url.Parse(*rabbitUrl)
if err != nil {
log.Panicf("cannot parse server URL. Err: %#v\n", err)
}
go consistencyCheckProcess()
log.Println("defs:", *defs)
log.Println("defs:", defs)
defs.Handlers = make(map[string]common.RequestProcessor)
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
log.Fatal(common.ProcessRabbitMQEvents(defs))
log.Fatal(defs.ProcessRabbitMQEvents())
}


@@ -162,9 +162,9 @@ func main() {
checker := CreateDefaultStateChecker(*checkOnStart, req, Gitea, time.Duration(*checkIntervalHours)*time.Hour)
go checker.ConsistencyCheckProcess()
listenDefs := &common.RabbitMQGiteaEventsProcessor{
listenDefs := common.ListenDefinitions{
Orgs: orgs,
// GitAuthor: GitAuthor,
GitAuthor: GitAuthor,
Handlers: map[string]common.RequestProcessor{
common.RequestType_PR: req,
common.RequestType_PRSync: req,
@@ -172,7 +172,7 @@ func main() {
common.RequestType_PRReviewRejected: req,
},
}
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
listenDefs.RabbitURL, _ = url.Parse(*rabbitUrl)
common.PanicOnError(common.ProcessRabbitMQEvents(listenDefs))
common.PanicOnError(listenDefs.ProcessRabbitMQEvents())
}
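
Both this file and the publisher above register per-event-type handlers in a map keyed by request type and hand it to the RabbitMQ event loop. A shape-only illustration of that dispatch; the local types and method names below are stand-ins, not the real src.opensuse.org/autogits/common API:

package main

import "fmt"

// Handler is a stand-in for the RequestProcessor interface; its real method
// set is not shown in this diff.
type Handler interface {
	Handle(eventType string) error
}

type logHandler struct{}

func (logHandler) Handle(eventType string) error {
	fmt.Println("handling", eventType)
	return nil
}

func main() {
	handlers := map[string]Handler{
		"pull_request": logHandler{},
		"push":         logHandler{},
	}
	for _, ev := range []string{"push", "issues"} {
		if h, ok := handlers[ev]; ok {
			h.Handle(ev)
		} else {
			fmt.Println("no handler registered for", ev)
		}
	}
}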


@@ -23,6 +23,9 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
refs := make([]string, 0, len(prset.PRs)-1)
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
org, repo, idx := pr.PRComponents()
title_refs = append(title_refs, repo)
@@ -31,16 +34,16 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
}
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ",\n")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, "\n") + "\n"
if prset.Config.ManualMergeOnly {
desc = desc + "\n\nManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
desc = desc + "\n### ManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
}
if prset.Config.ManualMergeProject {
desc = desc + "\nManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
desc = desc + "\n### ManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
}
if !prset.Config.ManualMergeOnly && !prset.Config.ManualMergeProject {
desc = desc + "\nAutomatic merge enabled. This will merge when all review requirements are satisfied."
desc = desc + "\n### Automatic merge enabled. This will merge when all review requirements are satisfied."
}
return
}
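
For reference, a throwaway program rendering the description format after this change; the referenced PRs, ref formatting, and GitAuthor value below are made up for illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	gitAuthor := "autogits-bot"
	refs := []string{"example-org/pkgA#12", "example-org/pkgB#7"}
	desc := fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", gitAuthor) +
		strings.Join(refs, "\n") + "\n"
	// The merge-policy note is now emitted as a markdown "###" heading.
	desc += "\n### ManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
	fmt.Println(desc)
}
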
@@ -205,8 +208,11 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
return err
}
if !common.IsDryRun && headCommit != newHeadCommit {
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
if !common.IsDryRun {
if headCommit != newHeadCommit {
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
}
title, desc := PrjGitDescription(prset)
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
if err != nil {
@@ -286,20 +292,24 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
return err
}
if !common.IsDryRun && headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
if !common.IsDryRun {
if headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
// update PR
PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
Body: PrjGitBody,
})
if PrjGitPR.PR.Body != PrjGitBody || PrjGitPR.PR.Title != PrjGitTitle {
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
Body: PrjGitBody,
})
}
}
return nil
}
@@ -336,7 +346,38 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
prjGitPRbranch = prjGitPR.PR.Head.Name
if prjGitPR.PR.State != "open" {
// close entire prset
if prjGitPR.PR.HasMerged {
// update branches in project
prjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, prjGitPRbranch, prjGitPR.PR.Base.Repo.SSHURL)
common.PanicOnError(err)
old_pkgs, err := git.GitSubmoduleList(common.DefaultGitPrj, prjGitPR.PR.MergeBase)
common.PanicOnError(err)
new_pkgs, err := git.GitSubmoduleList(common.DefaultGitPrj, prjGitPRbranch)
common.PanicOnError(err)
pkgs := make(map[string]string)
for pkg, old_commit := range old_pkgs {
if new_commit, found := new_pkgs[pkg]; found {
// pkg modified
if new_commit != old_commit {
pkgs[pkg] = new_commit
}
} else { // not found, pkg removed
pkgs[pkg] = ""
}
}
for pkg, commit := range new_pkgs {
if _, found := old_pkgs[pkg]; !found {
// pkg added
pkgs[pkg] = commit
}
}
PrjGitSubmoduleCheck(config, git, common.DefaultGitPrj, pkgs)
}
// close entire prset that is still open
common.LogInfo("PR State is closed:", prjGitPR.PR.State)
for _, pr := range prset.PRs {
if pr.PR.State == "open" {

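The merged-PR branch above computes which submodules were added, changed, or removed between the merge base and the PR head before handing them to PrjGitSubmoduleCheck. The same map computation in stand-alone form (package names and commit IDs are fabricated):

package main

import "fmt"

// changedSubmodules keeps only submodules that were modified, removed (mapped
// to ""), or newly added between two submodule snapshots.
func changedSubmodules(oldPkgs, newPkgs map[string]string) map[string]string {
	pkgs := map[string]string{}
	for pkg, oldCommit := range oldPkgs {
		if newCommit, found := newPkgs[pkg]; found {
			if newCommit != oldCommit {
				pkgs[pkg] = newCommit
			}
		} else {
			pkgs[pkg] = ""
		}
	}
	for pkg, commit := range newPkgs {
		if _, found := oldPkgs[pkg]; !found {
			pkgs[pkg] = commit
		}
	}
	return pkgs
}

func main() {
	oldPkgs := map[string]string{"vim": "aaa111", "gcc": "bbb222"}
	newPkgs := map[string]string{"vim": "ccc333", "bash": "ddd444"}
	fmt.Println(changedSubmodules(oldPkgs, newPkgs))
}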

@@ -55,44 +55,7 @@ func (s *DefaultStateChecker) ProcessPR(pr *models.PullRequest, config *common.A
return ProcesPullRequest(&event, common.AutogitConfigs{config})
}
func (s *DefaultStateChecker) VerifyProjectState(config *common.AutogitConfig) ([]*interfaces.PRToProcess, error) {
defer func() {
if r := recover(); r != nil {
common.LogError("panic caught")
if err, ok := r.(error); !ok {
common.LogError(err)
}
common.LogError(string(debug.Stack()))
}
}()
prsToProcess := []*interfaces.PRToProcess{}
prjGitOrg, prjGitRepo, prjGitBranch := config.GetPrjGit()
common.LogInfo(" checking", prjGitOrg+"/"+prjGitRepo+"#"+prjGitBranch)
git, err := GitHandler.CreateGitHandler(config.Organization)
common.LogDebug("Git Path:", git.GetPath())
if err != nil {
return nil, fmt.Errorf("Cannot create git handler: %w", err)
}
defer git.Close()
repo, err := Gitea.CreateRepositoryIfNotExist(git, prjGitOrg, prjGitRepo)
if err != nil {
return nil, fmt.Errorf("Error fetching or creating '%s/%s#%s' -- aborting verifyProjectState(). Err: %w", prjGitBranch, prjGitRepo, prjGitBranch, err)
}
_, err = git.GitClone(prjGitRepo, prjGitBranch, repo.SSHURL)
common.PanicOnError(err)
prsToProcess = append(prsToProcess, &interfaces.PRToProcess{
Org: prjGitOrg,
Repo: prjGitRepo,
Branch: prjGitBranch,
})
submodules, err := git.GitSubmoduleList(prjGitRepo, "HEAD")
func PrjGitSubmoduleCheck(config *common.AutogitConfig, git common.Git, repo string, submodules map[string]string) (prsToProcess []*interfaces.PRToProcess, err error) {
nextSubmodule:
for sub, commitID := range submodules {
common.LogDebug(" + checking", sub, commitID)
@@ -135,8 +98,8 @@ nextSubmodule:
}
// not found in past, check if we should advance the branch label ... pull the submodule
git.GitExecOrPanic(prjGitRepo, "submodule", "update", "--init", "--filter", "blob:none", "--", sub)
subDir := path.Join(prjGitRepo, sub)
git.GitExecOrPanic(repo, "submodule", "update", "--init", "--filter", "blob:none", "--", sub)
subDir := path.Join(repo, sub)
newCommits := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(subDir, "rev-list", "^origin/"+branch, commitID), "\n")
if len(newCommits) >= 1 {
@@ -152,10 +115,51 @@ nextSubmodule:
}
}
// forward any package-gits referred by the project git, but don't go back
return prsToProcess, nil
}
func (s *DefaultStateChecker) VerifyProjectState(config *common.AutogitConfig) ([]*interfaces.PRToProcess, error) {
defer func() {
if r := recover(); r != nil {
common.LogError("panic caught")
if err, ok := r.(error); !ok {
common.LogError(err)
}
common.LogError(string(debug.Stack()))
}
}()
prsToProcess := []*interfaces.PRToProcess{}
prjGitOrg, prjGitRepo, prjGitBranch := config.GetPrjGit()
common.LogInfo(" checking", prjGitOrg+"/"+prjGitRepo+"#"+prjGitBranch)
git, err := GitHandler.CreateGitHandler(config.Organization)
common.LogDebug("Git Path:", git.GetPath())
if err != nil {
return nil, fmt.Errorf("Cannot create git handler: %w", err)
}
defer git.Close()
repo, err := Gitea.CreateRepositoryIfNotExist(git, prjGitOrg, prjGitRepo)
if err != nil {
return nil, fmt.Errorf("Error fetching or creating '%s/%s#%s' -- aborting verifyProjectState(). Err: %w", prjGitBranch, prjGitRepo, prjGitBranch, err)
}
_, err = git.GitClone(prjGitRepo, prjGitBranch, repo.SSHURL)
common.PanicOnError(err)
prsToProcess = append(prsToProcess, &interfaces.PRToProcess{
Org: prjGitOrg,
Repo: prjGitRepo,
Branch: prjGitBranch,
})
submodules, err := git.GitSubmoduleList(prjGitRepo, "HEAD")
// forward any package-gits referred by the project git, but don't go back
return PrjGitSubmoduleCheck(config, git, prjGitRepo, submodules)
}
func (s *DefaultStateChecker) CheckRepos() error {
errorList := make([]error, 0, 10)