forked from adamm/autogits

35 Commits

Author SHA256 Message Date
79d589d565 wip 2025-08-19 17:28:39 +02:00
390cb89702 import: import SHA1 -> SHA256 repos
Also, always import all files to LFS when exported from scmsync
archives, as more often than not they are forgotten
2025-08-17 19:51:01 +02:00
d146fb8c4e common: add last-updated obs status field 2025-08-13 17:05:47 +02:00
7e78ee83c1 common: fix detection of branches 2025-08-13 17:04:48 +02:00
17e925bfd7 importer: follow scmsync'ed repos HEAD 2025-08-13 16:45:47 +02:00
878df15e58 Merge branch 'main' of src.opensuse.org:adamm/autogits 2025-08-13 12:48:35 +02:00
c84af6286d Fix crash on connection
Setting channel object in Rabbit listener object.
2025-08-12 11:29:25 +02:00
d2cbb8fd34 importer: rework according to updated git-importer 2025-08-12 10:38:50 +02:00
8436a49c5d br: build results as svg for packages 2025-08-05 16:08:52 +02:00
106e36d6bf importer: use -packages instead of -package
Improve docs to show that multiple packages can be specified, and how
2025-07-29 13:19:39 +02:00
0ec4986163 importer: continue processing repos ...
continue vs. break issue
2025-07-29 13:16:43 +02:00
fb7f6adc98 importer: allow multiple packages in the -package param 2025-07-28 20:07:07 +02:00
231f29b065 Merge remote-tracking branch 'gitea/main' 2025-07-28 14:04:31 +02:00
3f3645a453 importer: remove branches when they exist
Don't generate errors on imports that we ignore. The problem is this
slows down the import
2025-07-28 14:03:20 +02:00
42e2713cd8 Merge remote-tracking branch 'gitea/main' 2025-07-26 13:56:16 +02:00
3d24dce5c0 common: rabbit refactor
Generalize interface to allow processing of any events, not just
Gitea events.
2025-07-26 13:54:51 +02:00
0cefb45d8a allow imports of + in reponames 2025-07-25 14:57:13 +02:00
ddbb824006 group-review: fix group review approval and disapproval
and add unit tests for this
2025-07-25 13:54:30 +02:00
69dac4ec31 common: fix manifest path when pkg is path 2025-07-18 23:02:03 +02:00
b7e03ab465 common: use standard function for Basename 2025-07-18 20:41:57 +02:00
76aec3aabb PR: PrjGit description generation in one place
This allows for easier editing and for guidance of what will
happen next
2025-07-18 16:26:35 +02:00
19fb7e277b importer: case when OBS package name != repo name
also handle just one package import, for testing
2025-07-18 16:21:52 +02:00
51261f1bc1 PR: Add manifest to subdir mapping functions 2025-07-18 15:54:53 +02:00
949810709d import: handle previously cloned scmsync repos 2025-07-17 16:43:53 +02:00
c012570e89 importer: map packages to Gitea names 2025-07-16 21:28:26 +02:00
44a3b15a7d Merge pull request 'gitea_status_proxy' (#56) from ldragon/autogits:proxy into main
Reviewed-on: adamm/autogits#56
Reviewed-by: Adam Majer <adamm@noreply.src.opensuse.org>
2025-07-16 12:44:48 +02:00
c5db1c83a7 PR: detect and rebase project git commits
When the project git has advanced and we have other package changes
to the same project, the project git changes need to be rebased. The
simplest way of doing this is to skip all the submodule conflicts
and re-create them. This allows the submodule changes to be
mergeable again.
2025-07-15 19:08:05 +02:00
9f0909621b PR: fix timeline fetches
only fetch latest reviews from a user, not all
2025-07-15 11:06:17 +02:00
b3914b04bd Fix logic in crash protection
We must not access review.User object if it is nil
2025-07-11 12:02:37 +02:00
b43a19189e Enable code stream publishing 2025-07-11 12:02:08 +02:00
01b665230e message typo 2025-07-11 12:01:58 +02:00
1a07d4c541 Create Pull Requests to specified branches
instead of always using DefaultBranch. This means that the target
now always needs to be specified.
2025-07-11 12:01:43 +02:00
22e44dff47 Don't fail on project git pull request creation. 2025-07-11 12:01:24 +02:00
e5d07f0ce6 refactor to use net/http 2025-05-30 14:43:47 +05:30
df9478a920 gitea status proxy 2025-05-28 11:20:09 +05:30
36 changed files with 2195 additions and 996 deletions


@@ -118,14 +118,23 @@ go build \
-C obs-status-service \
-mod=vendor \
-buildmode=pie
#go build \
# -C workflow-direct \
# -mod=vendor \
# -buildmode=pie
#go build \
# -C workflow-pr \
# -mod=vendor \
# -buildmode=pie
go build \
-C workflow-direct \
-mod=vendor \
-buildmode=pie
go build \
-C workflow-pr \
-mod=vendor \
-buildmode=pie
%define install_autogits_service(unitfile) \
%{!?__unitname:%define __unitname %{unitfile} \
%{__unitname:%{expand:echo %{__unitname} | sed -e 's/@\.service$//' -e 's/\.service$//'}}} \
%pre -n %{__unitname} %service_add_pre %{unitfile} \
%post -n %{__unitname} %service_add_post %{unitfile} \
%preun -n %{__unitname} %service_del_preun %{unitfile} \
%postun -n %{__unitname} %service_del_postun %{unitfile} \
%{nil}
%install
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
@@ -134,20 +143,14 @@ install -D -m0755 devel-importer/devel-importer
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
#install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
#install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0644 systemd/workflow-pr@.service %{buildroot}%{_unitdir}/workflow-pr@.service
%pre -n gitea-events-rabbitmq-publisher
%service_add_pre gitea-events-rabbitmq-publisher.service
%post -n gitea-events-rabbitmq-publisher
%service_add_post gitea-events-rabbitmq-publisher.service
%preun -n gitea-events-rabbitmq-publisher
%service_del_preun gitea-events-rabbitmq-publisher.service
%postun -n gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service
%install_autogits_service(gitea-events-rabbitmq-publisher.service)
%install_autogits_service(workflow-direct@.service)
%install_autogits_service(workflow-pr@.service)
%files -n gitea-events-rabbitmq-publisher
%license COPYING
@@ -183,10 +186,10 @@ install -D -m0755 obs-status-service/obs-status-service
%files -n workflow-direct
%license COPYING
%doc workflow-direct/README.md
#%{_bindir}/workflow-direct
%{_bindir}/workflow-direct
%files -n workflow-pr
%license COPYING
%doc workflow-pr/README.md
#%{_bindir}/workflow-pr
%{_bindir}/workflow-pr


@@ -59,6 +59,7 @@ type AutogitConfig struct {
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
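As an editor's illustration of the new Subdirs field, here is a minimal sketch of a config value, assuming AutogitConfig is exported from the common package as the rest of this changeset suggests; all values below are hypothetical.

package main

import "src.opensuse.org/autogits/common"

func main() {
	// Only fields visible in the hunk above are used here.
	cfg := common.AutogitConfig{
		Reviewers:       []string{"factory-maintainers"}, // only used by `pr` workflow
		Committers:      []string{"some-helper-bot"},     // may also instruct the bot
		Subdirs:         []string{"a", "b", "libs/boo"},  // submodules get sorted under these
		ManualMergeOnly: true,                            // require an explicit "Merge OK"
	}
	_ = cfg
}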


@@ -274,12 +274,17 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
}
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--hash", "--verify", "refs/heads/"+branchName)
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--branch", "--hash", branchName)
if err != nil {
return "", fmt.Errorf("Can't find default branch: %s", branchName)
}
return strings.TrimSpace(id), nil
id = strings.TrimSpace(SplitLines(id)[0])
if len(id) < 10 {
return "", fmt.Errorf("Can't find branch: %s", branchName)
}
return id, nil
}
func (e *GitHandlerImpl) GitRemoteHead(gitDir, remote, branchName string) (string, error) {
@@ -345,6 +350,7 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
"GIT_COMMITTER_NAME=" + e.GitCommiter,
"EMAIL=not@exist@src.opensuse.org",
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_LFS_SKIP_PUSH=1",
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
}
if len(ExtraGitParams) > 0 {
@@ -769,6 +775,8 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
LogDebug("Getting submodules for:", commitId)
go func() {
defer done.Unlock()
defer close(data_out.ch)
@@ -916,6 +924,16 @@ type GitStatusData struct {
Path string
Status int
States [3]string
/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}
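// Editor's note (illustration, not part of the diff): per git-status(1), an
// unmerged porcelain=v2 entry has the shape
//   u <XY> <sub> <m1> <m2> <m3> <mW> <h1> <h2> <h3> <path>
// and the <sub> token is what ends up in SubmoduleChanges above ("N..." for a
// plain file such as .gitmodules, as the updated test data further down shows).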
func parseGitStatusHexString(data io.ByteReader) (string, error) {
@@ -938,6 +956,20 @@ func parseGitStatusHexString(data io.ByteReader) (string, error) {
}
}
func parseGitStatusString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}
func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
@@ -978,7 +1010,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -988,11 +1020,11 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGitStatusString(data)
ret.States[0], err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -1002,7 +1034,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
@@ -1012,15 +1044,22 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 7); err != nil {
if err = skipGitStatusEntry(data, 2); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
@@ -1031,7 +1070,7 @@ func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGitStatusString(data)
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}


@@ -555,6 +555,8 @@ func TestGitStatusParse(t *testing.T) {
Path: ".gitmodules",
Status: GitStatus_Unmerged,
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
SubmoduleChanges: "N...",
},
},
},


@@ -603,14 +603,14 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName s
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
prOptions := models.CreatePullRequestOption{
Base: repo.DefaultBranch,
Base: targetId,
Head: srcId,
Title: title,
Body: body,
}
if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(repo.DefaultBranch).WithHead(srcId),
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(targetId).WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil {
return pr.Payload, nil
@@ -718,20 +718,18 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
}
func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
limit := int64(20)
page := int64(1)
resCount := limit
resCount := 1
retData := []*models.TimelineComment{}
for resCount == limit {
for resCount > 0 {
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
issue.NewIssueGetCommentsAndTimelineParams().
WithOwner(org).
WithRepo(repo).
WithIndex(idx).
WithPage(&page).
WithLimit(&limit),
WithPage(&page),
gitea.transport.DefaultAuthentication,
)
@@ -739,11 +737,13 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
return nil, err
}
resCount = int64(len(res.Payload))
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
page++
retData = append(retData, res.Payload...)
}
LogDebug("total results:", len(retData))
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))


@@ -1,324 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type ListenDefinitions struct {
RabbitURL *url.URL // amqps://user:password@host/queue
GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitMessage rabbitmq.Delivery
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
ch, err := connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer ch.Close()
if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.processTopicChanges(ch, q.Name)
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.connectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.processRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
ch := make(chan RabbitMessage, 100)
go l.connectAndProcessRabbitMQ(ch)
return ch
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range l.Orgs {
for requestType, _ := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}
LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
}
}

common/manifest.go (new file, +56 lines)

@@ -0,0 +1,56 @@
package common
import (
"os"
"path"
"strings"
"gopkg.in/yaml.v3"
)
type Manifest struct {
Subdirectories []string
}
func (m *Manifest) SubdirForPackage(pkg string) string {
if m == nil {
return pkg
}
idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)
for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
}
}
if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
}
return pkg
}
func ReadManifestFile(filename string) (*Manifest, error) {
data, err := os.ReadFile(filename)
if err != nil {
return nil, err
}
return ParseManifestFile(data)
}
func ParseManifestFile(data []byte) (*Manifest, error) {
ret := &Manifest{}
err := yaml.Unmarshal(data, ret)
if err != nil {
return nil, err
}
return ret, nil
}
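For orientation, a minimal usage sketch of the new helpers (assumptions: the import path matches the one used in manifest_test.go below, and a _manifest file with a subdirectories: list sits in the current directory; the package name is illustrative):

package main

import (
	"fmt"

	"src.opensuse.org/autogits/common"
)

func main() {
	m, err := common.ReadManifestFile("_manifest")
	if err != nil {
		// a nil *Manifest is safe: SubdirForPackage then returns the package unchanged
		m = nil
	}
	// With "libs/boo" listed as a subdirectory, this prints "libs/boo/Boost".
	fmt.Println(m.SubdirForPackage("Boost"))
}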

common/manifest_test.go (new file, +56 lines)

@@ -0,0 +1,56 @@
package common_test
import (
"testing"
"src.opensuse.org/autogits/common"
)
func TestManifestSubdirAssignments(t *testing.T) {
tests := []struct {
Name string
ManifestContent string
Packages []string
ManifestLocations []string
}{
{
Name: "empty manifest",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
ManifestLocations: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
},
{
Name: "only few subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "b/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
},
}
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
m, err := common.ParseManifestFile([]byte(test.ManifestContent))
if err != nil {
t.Fatal(err)
}
for i, pkg := range test.Packages {
expected := test.ManifestLocations[i]
if l := m.SubdirForPackage(pkg); l != expected {
t.Error("Expected:", expected, "but got:", l)
}
}
})
}
}


@@ -562,25 +562,57 @@ func (c *ObsClient) DeleteProject(project string) error {
}
return nil
}
func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)
if err != nil {
return nil, err
}
return res.Body, nil
}
type PackageBuildStatus struct {
Package string `xml:"package,attr"`
Code string `xml:"code,attr"`
Details string `xml:"details"`
LastUpdate time.Time
}
func PackageBuildStatusComp(A, B *PackageBuildStatus) int {
return strings.Compare(A.Package, B.Package)
}
type BuildResult struct {
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []*PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
LastUpdate time.Time
}
func BuildResultComp(A, B *BuildResult) int {
if cmp := strings.Compare(A.Project, B.Project); cmp != 0 {
return cmp
}
if cmp := strings.Compare(A.Repository, B.Repository); cmp != 0 {
return cmp
}
return strings.Compare(A.Arch, B.Arch)
}
type Binary struct {
@@ -597,7 +629,7 @@ type BinaryList struct {
type BuildResultList struct {
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
Result []BuildResult `xml:"result"`
Result []*BuildResult `xml:"result"`
isLastBuild bool
}


@@ -25,6 +25,13 @@ type PRSet struct {
BotUser string
}
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
org = prinfo.PR.Base.Repo.Owner.UserName
repo = prinfo.PR.Base.Repo.Name
idx = prinfo.PR.Index
return
}
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
for _, p := range currentSet {
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
@@ -121,27 +128,28 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
}, nil
}
func (rs *PRSet) Contains(pr *models.PullRequest) bool {
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
for _, p := range rs.PRs {
if p.PR.Base.RepoID == pr.Base.RepoID &&
p.PR.Head.Sha == pr.Head.Sha &&
p.PR.Base.Name == pr.Base.Name {
return true
return p, true
}
}
return false
return nil, false
}
func (rs *PRSet) AddPR(pr *models.PullRequest) error {
if rs.Contains(pr) {
return nil
func (rs *PRSet) AddPR(pr *models.PullRequest) *PRInfo {
if pr, found := rs.Find(pr); found {
return pr
}
rs.PRs = append(rs.PRs, &PRInfo{
prinfo := &PRInfo{
PR: pr,
})
return nil
}
rs.PRs = append(rs.PRs, prinfo)
return prinfo
}
func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
@@ -172,6 +180,15 @@ func (rs *PRSet) GetPrjGitPR() (*PRInfo, error) {
return nil, PRSet_PrjGitMissing
}
func (rs *PRSet) NeedRecreatingPrjGit(currentBranchHash string) bool {
pr, err := rs.GetPrjGitPR()
if err != nil {
return true
}
return pr.PR.Base.Sha == currentBranchHash
}
func (rs *PRSet) IsConsistent() bool {
prjpr_info, err := rs.GetPrjGitPR()
if err != nil {
@@ -302,7 +319,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pr.Reviews = r
if !pr.Reviews.IsManualMergeOK() {
LogInfo("Not approved manual merge")
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
return false
}
}


@@ -15,7 +15,23 @@ import (
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
/*
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}
t.Error(r)
}
*/
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
timeline := make([]*models.TimelineComment, len(reviews))
for idx, review := range reviews {

common/rabbitmq.go (new file, +238 lines)

@@ -0,0 +1,238 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
type RabbitConnection struct {
RabbitURL *url.URL // amqps://user:password@host/queue
queueName string
ch *rabbitmq.Channel
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitProcessor interface {
GenerateTopics() []string
Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}
type RabbitMessage rabbitmq.Delivery
func (l *RabbitConnection) ProcessTopicChanges() {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer l.ch.Close()
if err = l.ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = l.ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = l.ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.ProcessTopicChanges()
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := l.ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.ConnectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.ProcessRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()
ch := make(chan RabbitMessage, 100)
go l.ConnectAndProcessRabbitMQ(ch)
return ch
}
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
}
}
}

common/rabbitmq_gitea.go (new file, +128 lines)

@@ -0,0 +1,128 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"fmt"
"runtime/debug"
"slices"
"strings"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string
c *RabbitConnection
}
func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}
func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range gitea.Orgs {
for requestType, _ := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}
LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
return nil
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
return fmt.Errorf("Invalid routing key: %s", route)
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); !ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
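A minimal wiring sketch for the new processor types (editor's illustration; the queue URL, org, and handler are hypothetical, and credentials are assumed to be supplied the way the common package already expects):

package main

import (
	"net/url"

	"src.opensuse.org/autogits/common"
)

type logPushes struct{}

func (logPushes) ProcessFunc(req *common.Request) error {
	common.LogInfo("push event:", req.Type)
	return nil
}

func main() {
	p := &common.RabbitMQGiteaEventsProcessor{
		Orgs:     []string{"pool"},
		Handlers: map[string]common.RequestProcessor{common.RequestType_Push: logPushes{}},
	}
	p.Connection().RabbitURL, _ = url.Parse("amqps://rabbit.opensuse.org/my-queue")

	// Blocks, consuming events and dispatching them via ProcessRabbitMessage.
	if err := common.ProcessRabbitMQEvents(p); err != nil {
		common.LogError(err)
	}
}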

common/rabbitmq_obs.go (new file, +22 lines)

@@ -0,0 +1,22 @@
package common
type RabbitMQObsBuildStatusProcessor struct {
c *RabbitConnection
}
func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
return []string{}
}
func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}
return o.c
}
func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
return nil
}


@@ -50,11 +50,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
}
slices.Sort(test.topicDelta)
@@ -64,11 +66,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}
changes := []string{}
l.UpdateTopics()
l.c.UpdateTopics(l)
a:
for {
select {
case c := <-l.topicSubChanges:
case c := <-l.c.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -78,13 +80,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
l.Orgs = test.orgs2
l.UpdateTopics()
l.c.UpdateTopics(l)
changes = []string{}
b:
for {
select {
case c := <-l.topicSubChanges:
case c := <-l.c.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)


@@ -25,26 +25,39 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, r
return nil, err
}
reviews := make([]*models.PullReview, 0, 10)
reviews := make([]*models.PullReview, 0, len(reviewers))
var comments []*models.TimelineComment
alreadyHaveUserReview := func(user string) bool {
for _, r := range reviews {
if r.User != nil && r.User.UserName == user {
return true
}
}
return false
}
for idx, item := range timeline {
if item.Type == TimelineCommentType_Review {
for _, r := range rawReviews {
if r.ID == item.ReviewID {
reviews = append(reviews, r)
if !alreadyHaveUserReview(r.User.UserName) {
reviews = append(reviews, r)
}
break
}
}
} else if item.Type == TimelineCommentType_Comment {
comments = append(comments, item)
} else if item.Type == TimelineCommentType_PushPull {
LogDebug("cut-off", item.Created)
timeline = timeline[0:idx]
break
} else {
LogDebug("Unhandled timeline type:", item.Type)
}
}
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))
return &PRReviews{
reviews: reviews,
@@ -72,6 +85,7 @@ func (r *PRReviews) IsManualMergeOK() bool {
if c.Updated != c.Created {
continue
}
LogDebug("comment:", c.User.UserName, c.Body)
if slices.Contains(r.reviewers, c.User.UserName) {
if bodyCommandManualMergeOK(c.Body) {
return true


@@ -113,6 +113,10 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}
func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {


@@ -23,7 +23,6 @@ import (
"errors"
"flag"
"fmt"
"io/fs"
"log"
"net/url"
"os"
@@ -73,6 +72,10 @@ func runObsCommand(args ...string) ([]string, error) {
var DebugMode bool
func giteaPackage(pkg string) string {
return strings.ReplaceAll(pkg, "+", "_")
}
func projectMaintainer(obs *common.ObsClient, prj string) ([]string, []string) { // users, groups
meta, err := obs.GetProjectMeta(prj)
if err != nil {
@@ -143,7 +146,9 @@ func listMaintainers(obs *common.ObsClient, prj string, pkgs []string) {
log.Panicln(err)
}
contact_email = append(contact_email, fmt.Sprintf("%s <%s>", user.Name, user.Email))
if user != nil {
contact_email = append(contact_email, fmt.Sprintf("%s <%s>", user.Name, user.Email))
}
}
log.Println(strings.Join(contact_email, ", "))
}
@@ -154,6 +159,7 @@ func gitImporter(prj, pkg string) error {
params = append(params, "-l", "debug")
}
params = append(params, pkg)
common.LogDebug("git-importer", params)
cmd := exec.Command("./git-importer", params...)
if idx := slices.IndexFunc(cmd.Env, func(val string) bool { return val[0:12] == "GITEA_TOKEN=" }); idx != -1 {
cmd.Env = slices.Delete(cmd.Env, idx, idx+1)
@@ -166,317 +172,113 @@ func gitImporter(prj, pkg string) error {
return nil
}
func cloneDevel(git common.Git, gitDir, outName, urlString string) error {
url, err := url.Parse(urlString)
// branch := url.Fragment
url.Fragment = ""
params := []string{"clone"}
/* if len(branch) > 0 {
params = append(params, "-b", branch)
}
*/
params = append(params, url.String(), outName)
if err != nil {
return fmt.Errorf("error parsing SSH URL. %w", err)
func cloneDevel(git common.Git, gitDir, outName, urlString, remote string, fatal bool) error {
if url, _ := url.Parse(urlString); url != nil {
url.Fragment = ""
urlString = url.String()
}
params := []string{"clone", "-o", remote}
params = append(params, urlString, outName)
if fatal {
git.GitExecOrPanic(gitDir, params...)
} else {
git.GitExec(gitDir, params...)
}
git.GitExecOrPanic(gitDir, params...)
return nil
}
func importRepos(packages []string) {
factoryRepos := make([]*models.Repository, 0, len(packages)*2)
develProjectPackages := make([]string, 0, len(packages))
for _, pkg := range packages {
src_pkg_name := strings.Split(pkg, ":")
repo, err := client.Repository.RepoGet(
repository.NewRepoGetParams().
WithDefaults().WithOwner("pool").WithRepo(src_pkg_name[0]),
r.DefaultAuthentication)
if err != nil {
if !errors.Is(err, &repository.RepoGetNotFound{}) {
log.Panicln(err)
}
log.Println("Cannot find src package:", src_pkg_name)
develProjectPackages = append(develProjectPackages, src_pkg_name[0])
} else {
factoryRepos = append(factoryRepos, repo.Payload)
}
}
log.Println("Num repos found:", len(factoryRepos))
if len(develProjectPackages) > 0 {
log.Println("Num of repos that need to create:", len(develProjectPackages))
log.Println("Create the following packages in pool to continue:", strings.Join(develProjectPackages, " "))
if forceNonPoolPackages {
log.Println(" IGNORING and will create these as non-pool packages!")
} else {
os.Exit(1)
}
}
oldPackageNames := make([]string, 0, len(factoryRepos))
for _, repo := range factoryRepos {
oldPackageNames = append(oldPackageNames, repo.Name)
}
// fork packags from pool
for i := 0; i < len(oldPackageNames); {
pkg := oldPackageNames[i]
log.Println(" + package:", pkg)
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
log.Println(" ** failed to import openSUSE:Factory", pkg)
log.Println(" ** falling back to devel project only")
develProjectPackages = append(develProjectPackages, pkg)
oldPackageNames = slices.Delete(oldPackageNames, i, i+1)
} else {
i++
func findMissingDevelBranch(git common.Git, pkg, project string) {
d, err := git.GitBranchHead(pkg, "devel")
if err != nil {
if _, err = git.GitBranchHead(pkg, "factory"); err != nil {
log.Println("factory is missing... so maybe repo is b0rked.")
return
}
hash := common.SplitLines(git.GitExecWithOutputOrPanic(pkg,
"log",
"factory",
"--all",
"--grep=build.opensuse.org/package/show/"+project+"/"+pkg,
"-1",
"--pretty=format:%H"))
if len(hash) > 0 {
log.Println(" devel @", hash[0])
git.GitExecOrPanic(pkg, "branch", "devel", hash[0])
} else {
git.GitExecOrPanic(pkg, "branch", "devel", "factory")
}
} else {
log.Println(" devel already exists?", d)
}
}
log.Println("adding remotes...")
for i := 0; i < len(factoryRepos); i++ {
pkg := factoryRepos[i]
// verify that package was created by `git-importer`, or it's scmsync package and clone it
fi, err := os.Stat(filepath.Join(git.GetPath(), pkg.Name))
if os.IsNotExist(err) {
if slices.Contains(develProjectPackages, pkg.Name) {
// failed import of former factory package
continue
}
// scmsync?
devel_project, err := devel_projects.GetDevelProject(pkg.Name)
if err != nil {
log.Panicln("devel project not found for", pkg.Name, "err:", err)
}
meta, _ := obs.GetPackageMeta(devel_project, pkg.Name)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg.Name, meta.ScmSync); err != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main")
continue
}
// try again, should now exist
if fi, err = os.Stat(filepath.Join(git.GetPath(), pkg.Name)); err != nil {
log.Panicln(err)
}
} else if err != nil {
func importFactoryRepoAndCheckHistory(pkg string, meta *common.PackageMeta) (factoryRepo *models.Repository, retErr error) {
if repo, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithDefaults().WithOwner("pool").WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
if !errors.Is(err, &repository.RepoGetNotFound{}) {
log.Panicln(err)
} else {
// verify that we do not have scmsync for imported packages
meta, err := obs.GetPackageMeta(prj, pkg.Name)
if err != nil {
log.Panicln(err)
}
if len(meta.ScmSync) > 0 {
log.Panicln("importing an scmsync package??:", prj, pkg.Name)
}
}
if !fi.IsDir() {
log.Panicln("Expected package file should be a directory. It's not.", fi)
}
log.Println("Cannot find src package:", pkg)
return nil, nil
} else {
factoryRepo = repo.Payload
CreatePoolFork(factoryRepo)
}
// add remote repos
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show", "-n")
switch pkg.Owner.UserName {
case "pool":
if !slices.Contains(strings.Split(out, "\n"), "pool") {
out := git.GitExecWithOutputOrPanic(pkg.Name, "remote", "add", "pool", pkg.CloneURL)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
}
default:
log.Panicln(pkg.Owner.UserName)
if _, err := os.Stat(filepath.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
common.LogDebug("Cloning factory...")
cloneDevel(git, "", pkg, factoryRepo.CloneURL, "pool", true) // in case we have imported
} else if err != nil {
common.PanicOnError(err)
} else {
// we have already cloned it... so, fetch pool remote
common.LogDebug("Fetching pool, as already should have the remote")
if err = git.GitExec(pkg, "fetch", "pool"); err != nil {
common.LogError(err)
return factoryRepo, err
}
}
for _, pkgName := range oldPackageNames {
log.Println("fetching git:", pkgName)
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "remote", "show", "-n"), "\n")
params := []string{"fetch", "--multiple"}
params = append(params, remotes...)
out, _ := git.GitExecWithOutput(pkgName, params...)
if len(strings.TrimSpace(out)) > 1 {
log.Println(out)
}
if slices.Contains(remotes, "origin") {
log.Println(" --- scmsync already, so we are done")
continue
}
// check if devel is ahead or behind factory and use that as reference
import_branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "^factory", "devel"), "\n")) > 0 {
log.Println(" *** devel ahead. Swtiching branches.")
import_branch = "devel"
}
// check that nothing is broken with the update
if slices.Contains(remotes, "pool") {
// check which branch is ahead
branches, err := fs.ReadDir(os.DirFS(path.Join(git.GetPath(), pkgName, ".git/refs/remotes")), "pool")
if err != nil {
if forceBadPool {
log.Println(" *** factory has no branches!!! Treating as a devel package.")
develProjectPackages = append(develProjectPackages, pkgName)
break
} else {
log.Panicln(" *** factory has no branches", branches)
}
}
pool_branch := "factory"
has_factory_devel := false
has_factory_factory := false
for _, branch := range branches {
if branch.Name() == "factory" {
has_factory_factory = true
} else if branch.Name() == "devel" {
has_factory_devel = true
}
}
log.Println(branches)
if has_factory_devel && has_factory_factory {
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "^pool/factory", "pool/devel"), "\n")) > 0 {
log.Println(" *** pool branch devel ahead. Switching branches.")
pool_branch = "devel"
}
} else if has_factory_devel && !has_factory_factory {
pool_branch = "devel"
} else if !has_factory_devel && has_factory_factory {
} else {
log.Panicln("branches screwed up for pkg", pkgName, branches)
}
// find tree object in factory branch
tree := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "-1", "--format=%T", "--no-commit-header", "pool/"+pool_branch))
log.Println("tree", tree)
import_tree_commits := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "--format=%H %T", "--no-commit-header", import_branch), "\n")
found := false
for i := range import_tree_commits {
commit_tree := strings.Split(import_tree_commits[i], " ")
if len(commit_tree) != 2 {
log.Panicln("wrong format?", commit_tree)
}
if commit_tree[1] == tree {
found = true
cherry_picks := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgName, "rev-list", "--no-merges", "--reverse", "--ancestry-path", commit_tree[0]+".."+import_branch), "\n")
log.Println("cherry picks", cherry_picks)
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "pool/"+pool_branch)
for _, pick := range cherry_picks {
git.GitExecOrPanic(pkgName, "cherry-pick", pick)
}
break
}
}
if !found {
log.Println("*** WARNING: Cannot find same tree for pkg", pkgName, "Will use current import instead")
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
}
} else {
git.GitExecOrPanic(pkgName, "checkout", "-B", "main", "heads/"+import_branch)
roots := 0
if _, err := git.GitRemoteHead(pkg, "pool", "devel"); err == nil {
factory_roots := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/factory", "--max-parents=0"))
devel_roots := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/devel", "--max-parents=0"))
roots = len(common.SplitLines(factory_roots))
if devel_roots != factory_roots || len(common.SplitLines(factory_roots)) != 1 {
roots = 10
}
} else if _, err := git.GitRemoteHead(pkg, "pool", "factory"); err == nil {
items := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "pool/factory", "--max-parents=0"))
roots = len(common.SplitLines(items))
} else {
common.LogInfo("No factory import ...")
}
for i := 0; i < len(develProjectPackages); i++ {
pkg := develProjectPackages[i]
meta, _ := obs.GetPackageMeta(prj, pkg)
if len(meta.ScmSync) > 0 {
if err2 := cloneDevel(git, "", pkg, meta.ScmSync); err2 != nil {
log.Panicln(err2)
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main")
continue
} else {
common.PanicOnError(gitImporter(prj, pkg))
if out, err := git.GitExecWithOutput(pkg, "show-ref", "--branches"); err != nil || len(common.SplitStringNoEmpty(out, "\n")) == 0 {
log.Println(" *** no branches in package. removing")
develProjectPackages = slices.Delete(develProjectPackages, i, i+1)
i--
continue
}
}
// mark newer branch as main
branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "rev-list", "^factory", "devel"), "\n")) > 0 {
log.Println(" *** pool branch 'devel' ahead. Switching branches.")
branch = "devel"
}
git.GitExecOrPanic(pkg, "checkout", "-B", "main", branch)
if roots != 1 {
common.LogError("Expected 1 root in factory, but found", roots)
common.LogError("Ignoring current import")
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), pkg)))
retErr = fmt.Errorf("Invalid factory repo -- treating as devel project only")
return
}
slices.SortFunc(factoryRepos, func(a, b *models.Repository) int {
if a.Name == b.Name {
orgOrderNo := func(org string) int {
switch org {
case "pool":
return 1
}
return 0 // current devel to clone
}
devel_project, err := devel_projects.GetDevelProject(pkg)
common.LogDebug("Devel project:", devel_project, err)
if err == common.DevelProjectNotFound {
// assume it's this project, maybe removed from factory
devel_project = prj
}
common.LogDebug("finding missing branches in", pkg, devel_project)
findMissingDevelBranch(git, pkg, devel_project)
return
}
return orgOrderNo(a.Owner.UserName) - orgOrderNo(b.Owner.UserName)
}
return strings.Compare(a.Name, b.Name)
})
factoryRepos = slices.CompactFunc(factoryRepos, func(a, b *models.Repository) bool {
return a.Name == b.Name
})
for _, pkg := range factoryRepos {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg.Name), r.DefaultAuthentication); err != nil {
// update package
fork, err := client.Repository.CreateFork(repository.NewCreateForkParams().
WithOwner(pkg.Owner.UserName).
WithRepo(pkg.Name).
WithBody(&models.CreateForkOption{
Organization: org,
}), r.DefaultAuthentication)
if err != nil {
log.Panicln("Error while trying to create fork from", pkg.Owner.UserName, pkg.Name, "to", org, ":", err)
}
repo = fork.Payload
} else {
repo = repoData.Payload
}
// branchName := repo.DefaultBranch
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg.Name, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkg.Name, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg.Name, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg.Name, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg.Name, "push", "develorigin", "main", "-f")
git.GitExec(pkg.Name, "push", "develorigin", "--delete", "factory", "devel")
// git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main", "devel/main")
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
func SetRepoOptions(repo *models.Repository) {
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(repo.Name).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
HasReleases: false,
@@ -488,81 +290,305 @@ func importRepos(packages []string) {
AllowRebaseUpdate: false,
AllowManualMerge: true,
AutodetectManualMerge: true,
DefaultMergeStyle: "fast-forward-only",
AllowRebase: false,
DefaultAllowMaintainerEdit: true,
}), r.DefaultAuthentication)
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
if err != nil {
log.Panicln("Failed to adjust repository:", repo.Name, err)
}
}
func CreatePoolFork(factoryRepo *models.Repository) *models.Repository {
pkg := factoryRepo.Name
log.Println("factory fork creator for develProjectPackage:", pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
// update package
fork, err := client.Repository.CreateFork(repository.NewCreateForkParams().
WithOwner("pool").
WithRepo(factoryRepo.Name).
WithBody(&models.CreateForkOption{
Organization: org,
}), r.DefaultAuthentication)
if err != nil {
log.Panicln("Error while trying to create fork from 'pool'", pkg, "to", org, ":", err)
}
repo := fork.Payload
return repo
} else {
return repoData.Payload
}
}
func CreateDevelOnlyPackage(pkg string) *models.Repository {
log.Println("repo creator for develProjectPackage:", pkg)
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil {
giteaPkg := giteaPackage(pkg)
repoData, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &giteaPkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
if err != nil {
log.Panicln("Failed to set default branch for package fork:", repo.Owner.UserName, "/", repo.Name, err)
log.Panicln("Error creating new package repository:", pkg, err)
}
repo := repoData.Payload
return repo
} else {
return repoData.Payload
}
}
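// PushRepository pushes the locally imported "main" branch (and any LFS objects) of pkg to its Gitea repository: a pool fork when factoryRepo is known, otherwise a devel-only repository. It then applies the standard repository options and removes leftover factory, devel and leap-16.0 branches.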
func PushRepository(factoryRepo, develRepo *models.Repository, pkg string) (repo *models.Repository) {
// branchName := repo.DefaultBranch
if factoryRepo != nil {
repo = CreatePoolFork(factoryRepo)
/*
devel
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg, "push", "develorigin", "--delete", b)
}
}
}*/
//branches := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg.Name, "branch", "-r"), "\n")
/* factory
for _, b := range branches {
if len(b) > 12 && b[0:12] == "develorigin/" {
b = b[12:]
if b == "factory" || b == "devel" {
git.GitExec(pkg.Name, "push", "develorigin", "--delete", b)
}
}
}
*/
// git.GitExecOrPanic(pkg.Name, "checkout", "-B", "main", "devel/main")
} else {
repo = CreateDevelOnlyPackage(pkg)
}
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg, "remote", "add", "develorigin", repo.SSHURL)
// git.GitExecOrPanic(pkgName, "fetch", "devel")
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
SetRepoOptions(repo)
git.GitExec(pkg, "push", "develorigin", "--delete", "factory")
git.GitExec(pkg, "push", "develorigin", "--delete", "devel")
git.GitExec(pkg, "push", "develorigin", "--delete", "leap-16.0")
return repo
}
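// importDevelRepoAndCheckHistory creates the devel package repository, clones it if needed and, for non-scmsync packages, sanity-checks the history: the factory and devel branches must each have a single root commit and share a common ancestor, otherwise the local clone is wiped and re-imported via git-importer.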
func importDevelRepoAndCheckHistory(pkg string, meta *common.PackageMeta) *models.Repository {
repo := CreateDevelOnlyPackage(pkg)
if _, err := os.Stat(filepath.Join(git.GetPath(), pkg)); os.IsNotExist(err) {
cloneDevel(git, "", pkg, repo.SSHURL, "develorigin", false) // in case we have imported
}
if CloneScmsync(pkg, meta) {
return repo
}
var p, dp string
factory_branch, fhe := git.GitRemoteHead(pkg, "develorigin", "factory")
if fhe == nil {
p = strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", factory_branch))
} else {
common.LogError(fhe)
}
devel_branch, dhe := git.GitRemoteHead(pkg, "develorigin", "devel")
if dhe != nil {
devel_project, err := devel_projects.GetDevelProject(pkg)
common.LogDebug("Devel project:", devel_project, err)
if err == common.DevelProjectNotFound {
// assume it's this project, maybe removed from factory
devel_project = prj
}
common.LogDebug("finding missing branches in", pkg, devel_project)
findMissingDevelBranch(git, pkg, devel_project)
devel_branch, dhe = git.GitBranchHead(pkg, "devel")
}
if dhe == nil {
dp = strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", devel_branch))
} else {
common.LogError(dhe)
}
// even if one parent for both, we need common ancestry, or we are comparing different things.
mb, mb_err := git.GitExecWithOutput(pkg, "merge-base", factory_branch, devel_branch)
mb = strings.TrimSpace(mb)
if p != "1" || dp != "1" || mb_err != nil || mb != factory_branch || mb != devel_branch {
common.LogInfo("Bad export found ... clearing", p, dp)
common.LogInfo(" merge base:", mb, factory_branch, devel_branch, mb_err)
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), pkg)))
}
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
common.PanicOnError(gitImporter(prj, pkg))
}
if p := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", "factory")); p != "1" {
common.LogError("Failed to import package:", pkg)
common.PanicOnError(fmt.Errorf("Expecting 1 root commit after devel import, but have %s", p))
}
if out, err := git.GitExecWithOutput(pkg, "show-ref", "--branches"); err != nil || len(common.SplitStringNoEmpty(out, "\n")) == 0 {
common.LogError(" *** no branches in package. removing")
return repo
}
return repo
}
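// SetMainBranch points the local "main" branch at the right commit: for scmsync packages it follows the scmsync URL fragment (or HEAD), otherwise it ensures a factory branch exists and uses devel when it has commits not yet in factory, falling back to factory.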
func SetMainBranch(pkg string, meta *common.PackageMeta) {
// scmsync: follow that and don't care
common.LogDebug("Setting main branch...")
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if slices.Contains(remotes, "origin") {
u, err := url.Parse(meta.ScmSync)
common.PanicOnError(err)
if len(u.Fragment) == 0 {
u.Fragment = "HEAD"
}
if err := git.GitExec(pkg, "checkout", "-B", "main", u.Fragment); err != nil {
git.GitExecOrPanic(pkg, "checkout", "-B", "main", "origin/"+u.Fragment)
}
return
}
// check if we have factory
if _, err := git.GitBranchHead(pkg, "factory"); err != nil {
if len(git.GitExecWithOutputOrPanic(pkg, "show-ref", "pool/factory")) > 20 {
git.GitExecOrPanic(pkg, "branch", "factory", "pool/factory")
}
}
for _, pkg := range develProjectPackages {
var repo *models.Repository
if repoData, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication); err != nil {
_, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
&models.CreateRepoOption{
ObjectFormatName: "sha256",
AutoInit: false,
Name: &pkg,
DefaultBranch: "main",
}),
r.DefaultAuthentication,
)
// mark newer branch as main
branch := "factory"
if len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "rev-list", "^factory", "devel"), "\n")) > 0 {
branch = "devel"
}
common.LogInfo("setting main to", branch)
git.GitExecOrPanic(pkg, "checkout", "-B", "main", branch)
}
if err != nil {
log.Panicln("Error creating new package repository:", pkg, err)
func ObsToRepoName(obspkg string) string {
return strings.ReplaceAll(obspkg, "+", "_")
}
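// ImportSha1Sync imports a SHA1 upstream into the local SHA256 repository by mirroring it into a temporary "sha1stuff" clone and piping git fast-export into git fast-import.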
func ImportSha1Sync(pkg string, url *url.URL) {
common.LogDebug("Converting SHA1", url.String())
branch := url.Fragment
url.Fragment = ""
p := path.Join(pkg, "sha1stuff")
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), p)))
git.GitExecOrPanic(pkg, "clone", "--mirror", url.String(), "sha1stuff")
git.GitExecOrPanic(p, "fetch", "origin", branch)
gitexport := exec.Command("/usr/bin/git", "fast-export", "--signed-tags=strip", "--tag-of-filtered-object=drop", "--all")
gitexport.Dir = path.Join(git.GetPath(), p)
gitexportData, err := gitexport.Output()
common.LogDebug("Got export data size:", len(gitexportData))
common.PanicOnError(err)
gitimport := exec.Command("/usr/bin/git", "fast-import", "--allow-unsafe-features")
gitimport.Dir = path.Join(git.GetPath(), pkg)
gitimport.Stdin = bytes.NewReader(gitexportData)
data, err := gitimport.CombinedOutput()
common.LogError(string(data))
common.PanicOnError(err)
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), p)))
}
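// LfsImport rewrites the entire history of the package repository so that common binary artifacts (archives, rpms, images, fonts, ...) are stored in Git LFS.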
func LfsImport(pkg string) {
git.GitExecOrPanic(pkg, "lfs", "migrate", "import", "--everything",
"--include=*.7z,*.bsp,*.bz2,*.gem,*.gz,*.jar,*.lz,*.lzma,*.obscpio,*.oxt,*.pdf,*.png,*.rpm,*.tar,*.tbz,*.tbz2,*.tgz,*.ttf,*.txz,*.whl,*.xz,*.zip,*.zst")
}
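// CloneScmsync handles scmsync packages: it adds the upstream as "origin", fetches it (falling back to ImportSha1Sync when the upstream is still a SHA1 repository), migrates binary files to LFS and returns true. Packages without an scmsync URL return false.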
func CloneScmsync(pkg string, meta *common.PackageMeta) bool {
if len(meta.ScmSync) > 0 {
u, _ := url.Parse(meta.ScmSync)
if remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n"); !slices.Contains(remotes, "origin") {
branch := u.Fragment
if len(branch) == 0 {
branch = "HEAD"
}
ret, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(
&models.EditRepoOption{
HasPullRequests: true,
HasPackages: false,
HasReleases: false,
HasActions: false,
AllowMerge: true,
AllowRebaseMerge: false,
AllowSquash: false,
AllowFastForwardOnly: true,
AllowRebaseUpdate: false,
AllowManualMerge: true,
AutodetectManualMerge: true,
DefaultMergeStyle: "fast-forward-only",
AllowRebase: false,
DefaultAllowMaintainerEdit: true,
}),
r.DefaultAuthentication,
)
if err != nil {
log.Panicln("Failed to adjust repository:", pkg, err)
}
repo = ret.Payload
} else {
repo = repoData.Payload
u.Fragment = ""
git.GitExecOrPanic(pkg, "remote", "add", "origin", u.String())
u.Fragment = branch
}
if err := git.GitExec(pkg, "fetch", "origin"); err != nil && strings.Contains(err.Error(), "fatal: mismatched algorithms: client sha256; server sha1") {
ImportSha1Sync(pkg, u)
} else if err != nil {
panic(err)
}
remotes := common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkg, "remote", "show"), "\n")
if !slices.Contains(remotes, "develorigin") {
git.GitExecOrPanic(pkg, "remote", "add", "develorigin", repo.SSHURL)
}
if slices.Contains(remotes, "origin") {
git.GitExecOrPanic(pkg, "lfs", "fetch", "--all")
git.GitExecOrPanic(pkg, "lfs", "push", "develorigin", "--all")
}
git.GitExecOrPanic(pkg, "push", "develorigin", "main", "-f")
git.GitExec(pkg, "push", "develorigin", "--delete", "factory", "devel")
LfsImport(pkg)
return true
}
_, err := client.Repository.RepoEdit(repository.NewRepoEditParams().WithOwner(org).WithRepo(pkg).WithBody(&models.EditRepoOption{
DefaultBranch: "main",
DefaultMergeStyle: "fast-forward-only",
}), r.DefaultAuthentication)
return false
}
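// importRepo imports a single OBS package into Gitea: it fetches the package meta, imports the factory history (falling back to a devel-only import or an scmsync clone), sets the main branch and pushes the result. It reports whether the pool history is broken and whether the import failed.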
func importRepo(pkg string) (BrokenFactoryPackage, FailedImport bool) {
BrokenFactoryPackage = false
FailedImport = false
var develRepo, factoryRepo *models.Repository
src_pkg_name := strings.Split(pkg, ":")
pkg = src_pkg_name[0]
meta, err := obs.GetPackageMeta(prj, pkg)
if err != nil {
meta, err = obs.GetPackageMeta(prj, pkg)
if err != nil {
log.Panicln("Failed to set default branch for package fork:", repo.Owner.UserName, "/", repo.Name, err)
log.Println("Error fetching pkg meta for:", prj, pkg, err)
}
}
if err != nil {
common.PanicOnError(err)
}
if meta == nil {
panic("package meta is nil...")
}
factoryRepo, err = importFactoryRepoAndCheckHistory(pkg, meta)
if factoryRepo != nil && err != nil {
BrokenFactoryPackage = true
}
if factoryRepo == nil && forceNonPoolPackages {
log.Println(" IGNORING and will create these as non-pool packages!")
}
if factoryRepo == nil || BrokenFactoryPackage {
develRepo = importDevelRepoAndCheckHistory(pkg, meta)
} else {
CloneScmsync(pkg, meta)
}
SetMainBranch(pkg, meta)
PushRepository(factoryRepo, develRepo, pkg)
return
}
func syncOrgTeams(groupName string, origTeam []common.PersonRepoMeta) []string {
@@ -653,7 +679,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
missing := []string{}
uids := make([]common.PersonRepoMeta, len(orig_uids))
copy(uids, orig_uids)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
collab, err := client.Repository.RepoListCollaborators(repository.NewRepoListCollaboratorsParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
if err != nil {
if errors.Is(err, &repository.RepoListCollaboratorsNotFound{}) {
return missing
@@ -674,7 +700,7 @@ func syncPackageCollaborators(pkg string, orig_uids []common.PersonRepoMeta) []s
log.Println("missing collabs for", pkg, ":", uids)
}
for _, u := range uids {
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(pkg).WithBody(&models.AddCollaboratorOption{
_, err := client.Repository.RepoAddCollaborator(repository.NewRepoAddCollaboratorParams().WithOwner(org).WithRepo(giteaPackage(pkg)).WithBody(&models.AddCollaboratorOption{
Permission: "write",
}).WithCollaborator(u.UserID), r.DefaultAuthentication)
@@ -705,6 +731,11 @@ func syncMaintainersToGitea(pkgs []string) {
missingDevs := []string{}
devs := []string{}
if len(prjMeta.ScmSync) > 0 {
common.LogInfo("Project already in Git. Maintainers must have been already synced. Skipping...")
return
}
for _, group := range prjMeta.Groups {
if group.GroupID == "factory-maintainers" {
log.Println("Ignoring factory-maintainers")
@@ -809,20 +840,31 @@ func createPrjGit() {
if err != nil {
log.Panicln(err)
}
file.WriteString("{\n // Reference build project\n \"ObsProject\": \""+prj+"\",\n}\n")
file.WriteString("{\n // Reference build project\n \"ObsProject\": \"" + prj + "\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "staging.config")
if file, err = os.Create(path.Join(git.GetPath(), common.DefaultGitPrj, "workflow.config")); err != nil {
log.Panicln(err)
}
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \""+org+"\",\n}\n")
file.WriteString("{\n \"Workflows\": [\"direct\", \"pr\"],\n \"Organization\": \"" + org + "\",\n}\n")
file.Close()
git.GitExecOrPanic(common.DefaultGitPrj, "add", "workflow.config")
}
}
}
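// TrimMultibuildPackages removes multibuild flavors (entries containing ':') from the package list, keeping only base packages.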
func TrimMultibuildPackages(packages []string) []string {
for i := 0; i < len(packages); {
if strings.Contains(packages[i], ":") {
packages = slices.Delete(packages, i, i+1)
} else {
i++
}
}
return packages
}
var client *apiclient.GiteaAPI
var r *transport.Runtime
var git common.Git
@@ -857,12 +899,19 @@ func main() {
syncMaintainers := flags.Bool("sync-maintainers-only", false, "Sync maintainers to Gitea and exit")
flags.BoolVar(&forceBadPool, "bad-pool", false, "Force packages if pool has no branches due to bad import")
flags.BoolVar(&forceNonPoolPackages, "non-pool", false, "Allow packages that are not in pool to be created. WARNING: Can't add to factory later!")
specificPackages := flags.String("packages", "", "Process only the specified packages, separated by commas, ignoring the others")
resumeAt := flags.String("resume", "", "Resume import at the given package")
syncPool := flags.Bool("sync-pool-only", false, "Force update of the pool based on the currently imported project")
if help := flags.Parse(os.Args[1:]); help == flag.ErrHelp || flags.NArg() != 2 {
printHelp(helpString.String())
return
}
if DebugMode {
common.SetLoggingLevel(common.LogLevelDebug)
}
r = transport.New(*giteaHost, apiclient.DefaultBasePath, []string{"https"})
r.DefaultAuthentication = transport.BearerToken(common.GetGiteaToken())
// r.SetDebug(true)
@@ -897,13 +946,7 @@ func main() {
prj = flags.Arg(0)
org = flags.Arg(1)
packages, err := runObsCommand("ls", prj)
for i := 0; i < len(packages); {
if strings.Contains(packages[i], ":") {
packages = slices.Delete(packages, i, i+1)
} else {
i++
}
}
packages = TrimMultibuildPackages(packages)
git, err = gh.CreateGitHandler(org)
if err != nil {
@@ -912,6 +955,27 @@ func main() {
defer git.Close()
log.Println(" - working directory:" + git.GetPath())
if *syncPool {
factory_pkgs, err := runObsCommand("ls", "openSUSE:Factory")
common.PanicOnError(err)
common.LogInfo("Syncing pool only...")
factory_pkgs = TrimMultibuildPackages(factory_pkgs)
for _, pkg := range packages {
if !slices.Contains(factory_pkgs, pkg) {
continue
}
repo, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithOwner(org).WithRepo(ObsToRepoName(pkg)), r.DefaultAuthentication)
common.PanicOnError(err)
if !slices.Contains(common.SplitLines(git.GitExecWithOutputOrPanic(pkg, "remote")), "pool") {
git.GitExecOrPanic(pkg, "remote", "add", "pool", repo.Payload.SSHURL)
}
git.GitExecOrPanic(pkg, "fetch", "pool")
}
}
/*
for _, pkg := range packages {
if _, err := client.Organization.CreateOrgRepo(organization.NewCreateOrgRepoParams().WithOrg(org).WithBody(
@@ -952,12 +1016,42 @@ func main() {
if *purgeOnly {
log.Println("Purging repositories...")
for _, pkg := range packages {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(pkg), r.DefaultAuthentication)
pkgs := packages
if len(*specificPackages) > 0 {
pkgs = common.SplitStringNoEmpty(*specificPackages, ",")
}
for _, pkg := range pkgs {
client.Repository.RepoDelete(repository.NewRepoDeleteParams().WithOwner(org).WithRepo(giteaPackage(pkg)), r.DefaultAuthentication)
}
os.Exit(10)
}
importRepos(packages)
if len(*specificPackages) != 0 {
packages = common.SplitStringNoEmpty(*specificPackages, ",")
}
slices.Sort(packages)
BrokenOBSPackage := []string{}
FailedImport := []string{}
for _, pkg := range packages {
if len(*resumeAt) > 0 && strings.Compare(*resumeAt, pkg) > 0 {
common.LogDebug(pkg, "skipped due to resuming at", *resumeAt)
continue
}
b, f := importRepo(pkg)
if b {
BrokenOBSPackage = append(BrokenOBSPackage, pkg)
}
if f {
FailedImport = append(FailedImport, pkg)
}
}
syncMaintainersToGitea(packages)
common.LogError("Have broken pool packages:", len(BrokenOBSPackage))
// common.LogError("Total pool packages:", len(factoryRepos))
common.LogError("Failed to import:", strings.Join(FailedImport, ","))
common.LogInfo("BROKEN Pool packages:", strings.Join(BrokenOBSPackage, "\n"))
}


@@ -0,0 +1,46 @@
package main
import (
"encoding/json"
"fmt"
"io"
"os"
"github.com/tailscale/hujson"
)
type Config struct {
ForgeEndpoint string `json:"forge_url"`
Keys []string `json:"keys"`
}
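// A minimal example config, parsed as HuJSON so comments and trailing commas
// are allowed (values below are purely illustrative):
//
//   {
//     // base URL that /repos/{owner}/{repo}/statuses/{sha} is appended to
//     "forge_url": "https://gitea.example.com/api/v1",
//     // bearer tokens accepted by this proxy
//     "keys": ["proxy-token-1", "proxy-token-2"],
//   }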
type contextKey string
const configKey contextKey = "config"
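// ReadConfig parses a HuJSON (JSON with comments and trailing commas) document into a Config.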
func ReadConfig(reader io.Reader) (*Config, error) {
data, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("error reading config data: %w", err)
}
config := Config{}
data, err = hujson.Standardize(data)
if err != nil {
return nil, fmt.Errorf("failed to parse json: %w", err)
}
if err := json.Unmarshal(data, &config); err != nil {
return nil, fmt.Errorf("error parsing json to api keys and target url: %w", err)
}
return &config, nil
}
func ReadConfigFile(filename string) (*Config, error) {
file, err := os.Open(filename)
if err != nil {
return nil, fmt.Errorf("cannot open config file for reading. err: %w", err)
}
defer file.Close()
return ReadConfig(file)
}


@@ -0,0 +1,15 @@
package main
import (
"context"
"net/http"
)
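// ConfigMiddleWare stores the parsed Config in each request's context under configKey so that handlers can retrieve it.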
func ConfigMiddleWare(cfg *Config) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), configKey, cfg)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
}

gitea_status_proxy/main.go

@@ -0,0 +1,169 @@
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io"
"net/http"
"os"
"slices"
"strings"
"src.opensuse.org/autogits/common"
)
type Status struct {
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
type StatusInput struct {
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
func main() {
configFile := flag.String("config", "", "status proxy config file")
flag.Parse()
if *configFile == "" {
common.LogError("missing required argument config")
return
}
config, err := ReadConfigFile(*configFile)
if err != nil {
common.LogError("Failed to read config file", err)
return
}
mux := http.NewServeMux()
mux.Handle("/repos/{owner}/{repo}/statuses/{sha}", ConfigMiddleWare(config)(http.HandlerFunc(StatusProxy)))
common.LogInfo("server up and listening on :3000")
err = http.ListenAndServe(":3000", mux)
if err != nil {
common.LogError("Server failed to start up", err)
}
}
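// StatusProxy handles POST /repos/{owner}/{repo}/statuses/{sha}: it authenticates the caller against the configured proxy keys, wraps the submitted state and target_url into a fixed "Build in obs" status context, forwards it to the forge using the GITEA_TOKEN environment variable and relays the forge's response.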
func StatusProxy(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodPost {
config, ok := r.Context().Value(configKey).(*Config)
if !ok {
common.LogError("Config missing from context")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
header := r.Header.Get("Authorization")
if header == "" {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token_arr := strings.Split(header, " ")
if len(token_arr) != 2 {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
if !strings.EqualFold(token_arr[0], "Bearer") {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token := token_arr[1]
if !slices.Contains(config.Keys, token) {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
owner := r.PathValue("owner")
repo := r.PathValue("repo")
sha := r.PathValue("sha")
if !ok {
common.LogError("Failed to get config from context, is it set?")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
posturl := fmt.Sprintf("%s/repos/%s/%s/statuses/%s", config.ForgeEndpoint, owner, repo, sha)
decoder := json.NewDecoder(r.Body)
var statusinput StatusInput
err := decoder.Decode(&statusinput)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
status := Status{
Context: "Build in obs",
State: statusinput.State,
TargetUrl: statusinput.TargetUrl,
}
status_payload, err := json.Marshal(status)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
client := &http.Client{}
req, err := http.NewRequest("POST", posturl, bytes.NewBuffer(status_payload))
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
ForgeToken := os.Getenv("GITEA_TOKEN")
if ForgeToken == "" {
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
common.LogError("GITEA_TOKEN was not set, all requests will fail")
return
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
resp, err := client.Do(req)
if err != nil {
common.LogError(fmt.Sprintf("Request to forge endpoint failed: %v", err))
http.Error(w, http.StatusText(http.StatusBadGateway), http.StatusBadGateway)
return
}
defer resp.Body.Close()
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(resp.StatusCode)
/*
the commented out section sets every key
value from the headers, unsure if this
leaks information from gitea
for k, v := range resp.Header {
for _, vv := range v {
w.Header().Add(k, vv)
}
}
*/
_, err = io.Copy(w, resp.Body)
if err != nil {
common.LogError("Error copying response body: %v", err)
}
} else {
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
return
}
}

go.mod

@@ -16,6 +16,8 @@ require (
require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
@@ -28,6 +30,7 @@ require (
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/redis/go-redis/v9 v9.11.0 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect

go.sum

@@ -1,8 +1,12 @@
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -50,6 +54,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=


@@ -11,6 +11,7 @@ import (
"strconv"
"strings"
"time"
"unicode"
"src.opensuse.org/autogits/common"
"src.opensuse.org/autogits/common/gitea-generated/models"
@@ -21,9 +22,10 @@ var acceptRx *regexp.Regexp
var rejectRx *regexp.Regexp
var groupName string
func InitRegex(groupName string) {
acceptRx = regexp.MustCompile("\\s*:\\s*LGTM")
rejectRx = regexp.MustCompile("\\s*:\\s*")
func InitRegex(newGroupName string) {
groupName = newGroupName
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
rejectRx = regexp.MustCompile("^:\\s*")
}
func ParseReviewLine(reviewText string) (bool, string) {
@@ -34,7 +36,18 @@ func ParseReviewLine(reviewText string) (bool, string) {
return false, line
}
return true, line[glen:]
l := line[glen:]
for idx, r := range l {
if unicode.IsSpace(r) {
continue
} else if r == ':' {
return true, l[idx:]
} else {
return false, line
}
}
return false, line
}
func ReviewAccepted(reviewText string) bool {


@@ -2,6 +2,76 @@ package main
import "testing"
func TestReviews(t *testing.T) {
func TestReviewApprovalCheck(t *testing.T) {
tests := []struct {
Name string
GroupName string
InString string
Approved bool
Rejected bool
}{
{
Name: "Empty String",
GroupName: "group",
InString: "",
},
{
Name: "Random Text",
GroupName: "group",
InString: "some things LGTM",
},
{
Name: "Group name with Random Text means disapproval",
GroupName: "group",
InString: "@group: some things LGTM",
Rejected: true,
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "LGTM approval",
GroupName: "group2",
InString: "@group2: LGTM",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approved",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approve",
Approved: true,
},
{
Name: "disapproval",
GroupName: "group2",
InString: "@group2: disapprove",
Rejected: true,
},
}
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
InitRegex(test.GroupName)
if r := ReviewAccepted(test.InString); r != test.Approved {
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
}
if r := ReviewRejected(test.InString); r != test.Rejected {
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
}
})
}
}


@@ -315,7 +315,9 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
meta.Title = fmt.Sprintf("PR#%d to %s", pr.Index, pr.Base.Name)
meta.PublicFlags = common.Flags{Contents: "<disable/>"}
// QE wants it published ... also we should not hardcode it here, since
// it is configurable via the :PullRequest project
// meta.PublicFlags = common.Flags{Contents: "<disable/>"}
meta.Groups = nil
meta.Persons = nil
@@ -459,7 +461,7 @@ func FetchOurLatestActionableReview(gitea common.Gitea, org, repo string, id int
for idx := len(reviews) - 1; idx >= 0; idx-- {
review := reviews[idx]
if review.User != nil || review.User.UserName == Username {
if review.User == nil || review.User.UserName == Username {
if IsDryRun {
// for purposes of moving forward a no-op check
return review, nil
@@ -547,7 +549,7 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
}
if pr.State != "closed" {
common.LogInfo(" ignoring peding PR", thread.Subject.HTMLURL, " state:", pr.State)
common.LogInfo(" ignoring pending PR", thread.Subject.HTMLURL, " state:", pr.State)
return false
}


@@ -1 +1,2 @@
obs-status-service
*.svg


@@ -22,8 +22,13 @@ import (
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"slices"
"strings"
"time"
"src.opensuse.org/autogits/common"
)
@@ -33,27 +38,18 @@ const (
)
var obs *common.ObsClient
var debug bool
func LogDebug(v ...any) {
if debug {
log.Println(v...)
func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
list := common.BuildResultList{
Result: res,
}
}
func ProjectStatusSummarySvg(project string) []byte {
res := GetCurrentStatus(project)
if res == nil {
return nil
}
pkgs := res.GetPackageList()
pkgs := list.GetPackageList()
maxLen := 0
for _, p := range pkgs {
maxLen = max(maxLen, len(p))
}
width := float32(len(res.Result))*1.5 + float32(maxLen)*0.8
width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
height := 1.5*float32(maxLen) + 30
ret := bytes.Buffer{}
@@ -64,21 +60,78 @@ func ProjectStatusSummarySvg(project string) []byte {
ret.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
ret.WriteString(`<defs>
<g id="f"> <!-- failed -->
<rect width="1em" height="1em" fill="#800" />
<rect width="8em" height="1.5em" fill="#800" />
</g>
<g id="s"> <!--succeeded-->
<rect width="1em" height="1em" fill="#080" />
<rect width="8em" height="1.5em" fill="#080" />
</g>
<g id="buidling"> <!--building-->
<rect width="1em" height="1em" fill="#880" />
<rect width="8em" height="1.5em" fill="#880" />
</g>
</defs>`)
ret.WriteString(`<use href="#f" x="1em" y="2em"/>`)
ret.WriteString(`</svg>`)
return ret.Bytes()
}
func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
if R != nil && S != nil {
switch S.Code {
case "succeeded", "failed", "building":
return "/buildlog/" + R.Project + "/" + S.Package + "/" + R.Repository + "/" + R.Arch
}
}
return ""
}
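// PackageStatusSummarySvg renders one section per package (the package itself plus its multibuild flavors "pkg:flavor"), with a row per repository and architecture, linking each status badge to its build log when one is available.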
func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
// per repo, per arch status bins
repo_names := []string{}
package_names := []string{}
multibuild_prefix := pkg + ":"
for _, r := range res {
if pos, found := slices.BinarySearchFunc(repo_names, r.Repository, strings.Compare); !found {
repo_names = slices.Insert(repo_names, pos, r.Repository)
}
for _, p := range r.Status {
if p.Package == pkg || strings.HasPrefix(p.Package, multibuild_prefix) {
if pos, found := slices.BinarySearchFunc(package_names, p.Package, strings.Compare); !found {
package_names = slices.Insert(package_names, pos, p.Package)
}
}
}
}
ret := NewSvg()
for _, pkg = range package_names {
// if len(package_names) > 1 {
ret.WriteTitle(pkg)
// }
for _, name := range repo_names {
ret.WriteSubtitle(name)
// print all repo arches here and build results
for _, r := range res {
if r.Repository != name {
continue
}
for _, s := range r.Status {
if s.Package == pkg {
link := LinkToBuildlog(r, s)
ret.WritePackageStatus(link, r.Arch, s.Code, s.Details)
}
}
}
}
}
return ret.GenerateSvg()
}
func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus) []byte {
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
if !ok {
buildStatus = common.ObsBuildStatusDetails["error"]
@@ -95,12 +148,18 @@ func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
}
}
log.Println(status, " -> ", buildStatus)
buildlog := LinkToBuildlog(repo, status)
startTag := ""
endTag := ""
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="` + fillColor + `"/>
<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>
</svg>`)
if len(buildlog) > 0 {
startTag = "<a href=\"" + buildlog + "\">"
endTag = "</a>"
}
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">` +
`<rect width="100%" height="100%" fill="` + fillColor + `"/>` + startTag +
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>` + endTag + `</svg>`)
}
func main() {
@@ -108,60 +167,120 @@ func main() {
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
flag.BoolVar(&debug, "debug", false, "Enable debug logging")
obsUrl := flag.String("obs-url", "https://api.opensuse.org", "OBS API endpoint for package buildlog information")
debug := flag.Bool("debug", false, "Enable debug logging")
// RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
// Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
flag.Parse()
common.PanicOnError(common.RequireObsSecretToken())
if *debug {
common.SetLoggingLevel(common.LogLevelDebug)
}
// common.PanicOnError(common.RequireObsSecretToken())
var err error
if obs, err = common.NewObsClient(*obsHost); err != nil {
if obs, err = common.NewObsClient(*obsUrl); err != nil {
log.Fatal(err)
}
http.HandleFunc("GET /{Project}", func(res http.ResponseWriter, req *http.Request) {
if redisUrl := os.Getenv("REDIS"); len(redisUrl) > 0 {
RedisConnect(redisUrl)
} else {
common.LogError("REDIS needs to contains URL of the OBS Redis instance with login information")
return
}
go func() {
for {
if err := RescanRepositories(); err != nil {
common.LogError("Failed to rescan repositories.", err)
}
time.Sleep(time.Minute * 5)
}
}()
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
common.LogInfo(" request: GET /status/" + obsPrj)
res.WriteHeader(http.StatusBadRequest)
})
http.HandleFunc("GET /{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
/*
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
status, _ := PackageBuildStatus(obsPrj, obsPkg)
svg := PackageStatusSummarySvg(status)
*/
status := FindAndUpdateProjectResults(obsPrj)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
//res.Header().Add("size", fmt.Sprint(len(svg)))
//res.Write(svg)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
})
http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
status := FindAndUpdateRepoResults(obsPrj, repo)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
common.LogInfo("GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch)
res.Header().Add("content-type", "image/svg+xml")
for _, r := range FindAndUpdateProjectResults(prj) {
if r.Arch == arch && r.Repository == repo {
if idx, found := slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res.Write(BuildStatusSvg(r, r.Status[idx]))
return
}
break
}
}
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
})
http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("location", "https://build.opensuse.org/package/live_build_log/"+prj+"/"+pkg+"/"+repo+"/"+arch)
res.WriteHeader(307)
return
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
data, err := obs.BuildLog(prj, pkg, repo, arch)
if err != nil {
res.WriteHeader(http.StatusInternalServerError)
common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
return
}
defer data.Close()
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
for _, status := range r.Status {
if status.Package == pkg {
res.Write(PackageStatusSummarySvg(status))
return
}
}
}
}
io.Copy(res, data)
})
go ProcessUpdates()
if *disableTls {
log.Fatal(http.ListenAndServe(*listen, nil))
} else {


@@ -0,0 +1,82 @@
package main
import (
"os"
"testing"
"src.opensuse.org/autogits/common"
)
func TestStatusSvg(t *testing.T) {
os.WriteFile("teststatus.svg", BuildStatusSvg(nil, &common.PackageBuildStatus{
Package: "foo",
Code: "succeeded",
Details: "more success here",
}), 0o777)
data := []*common.BuildResult{
{
Project: "project:foo",
Repository: "repo1",
Arch: "x86_64",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "s390x",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "unresolvable",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "i586",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "blocked",
Details: "foo bar is why",
},
},
},
{
Project: "project:foo",
Repository: "TW",
Arch: "s390",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "excluded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
}
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
}

obs-status-service/redis.go

@@ -0,0 +1,214 @@
package main
import (
"context"
"slices"
"strings"
"sync"
"time"
"github.com/redis/go-redis/v9"
"src.opensuse.org/autogits/common"
)
var RepoStatus []*common.BuildResult = []*common.BuildResult{}
var RepoStatusLock *sync.RWMutex = &sync.RWMutex{}
var redisClient *redis.Client
func RedisConnect(RedisUrl string) {
opts, err := redis.ParseURL(RedisUrl)
if err != nil {
panic(err)
}
redisClient = redis.NewClient(opts)
}
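// UpdateResults refreshes one repository/arch result from its OBS Redis hash: "scheduled" entries are shown as building, "code:details" values are split, per-package statuses are upserted and packages no longer present in the hash are dropped.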
func UpdateResults(r *common.BuildResult) {
RepoStatusLock.Lock()
defer RepoStatusLock.Unlock()
key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
common.LogDebug(" + Updating", key)
data, err := redisClient.HGetAll(context.Background(), key).Result()
if err != nil {
common.LogError("Failed fetching build results for", key, err)
}
common.LogDebug(" + Update size", len(data))
reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
for _, pkg := range r.Status {
pkg.LastUpdate = reset_time
}
r.LastUpdate = time.Now()
for pkg, result := range data {
if strings.HasPrefix(result, "scheduled") {
// TODO: lookup where's building
result = "building"
}
var idx int
var found bool
var code string
var details string
if pos := strings.IndexByte(result, ':'); pos > -1 && pos < len(result) {
code = result[0:pos]
details = result[pos+1:]
} else {
code = result
details = ""
}
if idx, found = slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res := r.Status[idx]
res.LastUpdate = r.LastUpdate
res.Code = code
res.Details = details
} else {
r.Status = slices.Insert(r.Status, idx, &common.PackageBuildStatus{
Package: pkg,
Code: code,
Details: details,
LastUpdate: r.LastUpdate,
})
}
}
for idx := 0; idx < len(r.Status); {
if r.Status[idx].LastUpdate == reset_time {
r.Status = slices.Delete(r.Status, idx, idx+1)
} else {
idx++
}
}
}
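// FindProjectResults returns the cached build results for every repository and architecture of the given project.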
func FindProjectResults(project string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindRepoResults(project, repo string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project, Repository: repo}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project && RepoStatus[idx].Repository == repo {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindAndUpdateProjectResults(project string) []*common.BuildResult {
res := FindProjectResults(project)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// at most 1 update per 10 seconds for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func FindAndUpdateRepoResults(project, repo string) []*common.BuildResult {
res := FindRepoResults(project, repo)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// at most 1 update per 10 seconds for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
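// RescanRepositories enumerates the "result.<project>/<repo>/<arch>" hashes in Redis, adds repositories that appeared since the last scan to RepoStatus and removes the ones that are gone.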
func RescanRepositories() error {
ctx := context.Background()
var cursor uint64
var err error
common.LogDebug("** starting rescanning ...")
RepoStatusLock.Lock()
for _, repo := range RepoStatus {
repo.Dirty = false
}
RepoStatusLock.Unlock()
var count int
for {
var data []string
data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
if err != nil {
return err
}
RepoStatusLock.Lock()
for _, repo := range data {
r := strings.Split(repo, "/")
if len(r) != 3 || len(r[0]) < 8 || r[0][0:7] != "result." {
continue
}
d := &common.BuildResult{
Project: r[0][7:],
Repository: r[1],
Arch: r[2],
}
if pos, found := slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
RepoStatus[pos].Dirty = true
} else {
d.Dirty = true
RepoStatus = slices.Insert(RepoStatus, pos, d)
count++
}
}
RepoStatusLock.Unlock()
if cursor == 0 {
break
}
}
common.LogDebug(" added a total", count, "repos")
count = 0
RepoStatusLock.Lock()
for i := 0; i < len(RepoStatus); {
if !RepoStatus[i].Dirty {
RepoStatus = slices.Delete(RepoStatus, i, i+1)
count++
} else {
i++
}
}
RepoStatusLock.Unlock()
common.LogDebug(" removed", count, "repos")
common.LogDebug(" total repos:", len(RepoStatus))
return nil
}


@@ -1,82 +0,0 @@
package main
import (
"log"
"slices"
"sync"
"time"
"src.opensuse.org/autogits/common"
)
var WatchedRepos []string
var mutex sync.Mutex
var StatusUpdateCh chan StatusUpdateMsg = make(chan StatusUpdateMsg)
var statusMutex sync.RWMutex
var CurrentStatus map[string]*common.BuildResultList = make(map[string]*common.BuildResultList)
type StatusUpdateMsg struct {
ObsProject string
Result *common.BuildResultList
}
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
defer statusMutex.RUnlock()
if ret, found := CurrentStatus[project]; found {
return ret
} else {
go WatchObsProject(obs, project)
return nil
}
}
func ProcessUpdates() {
for {
msg := <-StatusUpdateCh
statusMutex.Lock()
CurrentStatus[msg.ObsProject] = msg.Result
drainedChannel:
for {
select {
case msg = <-StatusUpdateCh:
CurrentStatus[msg.ObsProject] = msg.Result
default:
statusMutex.Unlock()
break drainedChannel
}
}
}
}
func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
old_state := ""
mutex.Lock()
if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
mutex.Unlock()
return
} else {
WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
mutex.Unlock()
}
LogDebug("+ watching", ObsProject)
opts := common.BuildResultOptions{}
for {
state, err := obs.BuildStatusWithState(ObsProject, &opts)
if err != nil {
log.Println(" *** Error fetching build for", ObsProject, err)
time.Sleep(time.Minute)
} else {
opts.OldState = state.State
LogDebug(" --> update", ObsProject, " => ", old_state)
StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
}
}
}


@@ -1,34 +0,0 @@
package main
import (
"testing"
"go.uber.org/mock/gomock"
"src.opensuse.org/autogits/common"
mock_common "src.opensuse.org/autogits/common/mock"
)
func TestWatchObsProject(t *testing.T) {
tests := []struct {
name string
res common.BuildResultList
}{
{
name: "two requests",
res: common.BuildResultList{
State: "success",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctl := gomock.NewController(t)
obs := mock_common.NewMockObsStatusFetcherWithState(ctl)
obs.EXPECT().BuildStatusWithState("test:foo", "").Return(&test.res, nil)
WatchObsProject(obs, "test:foo")
})
}
}

obs-status-service/svg.go

@@ -0,0 +1,119 @@
package main
import (
"bytes"
"fmt"
"slices"
)
type SvgWriter struct {
ypos float64
header []byte
out bytes.Buffer
}
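// NewSvg returns an SvgWriter preloaded with the SVG header and a <defs> block containing one reusable badge per build status; the final document height is filled in by GenerateSvg.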
func NewSvg() *SvgWriter {
svg := &SvgWriter{}
svg.header = []byte(`<svg version="2.0" overflow="auto" width="40ex" height="`)
svg.out.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
return svg
}
func (svg *SvgWriter) WriteTitle(title string) {
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + title + "</text>")
svg.ypos += 2.5
}
func (svg *SvgWriter) WriteSubtitle(subtitle string) {
svg.out.WriteString(`<use href="#repotitle" y="` + fmt.Sprint(svg.ypos-2) + `em"/>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + subtitle + `</text>`)
svg.ypos += 2
}
func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
StatusToSVG := func(S string) string {
switch S {
case "succeeded":
return "s"
case "failed":
return "f"
case "broken", "scheduled":
return S
case "blocked":
return "b"
case "building":
return "build"
case "unresolvable":
return "u"
case "disabled":
return "d"
case "excluded":
return "e"
}
return "un"
}
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + arch + `</text>`)
svg.out.WriteString(`<g>`)
if len(loglink) > 0 {
svg.out.WriteString(`<a href="` + loglink + `">`)
}
svg.out.WriteString(`<use href="#` + StatusToSVG(status) + `" x="20ex" y="` + fmt.Sprint(svg.ypos-1.7) + `em"/>`)
if len(loglink) > 0 {
svg.out.WriteString(`</a>`)
}
if len(detail) > 0 {
svg.out.WriteString(`<title>` + fmt.Sprint(detail) + "</title>")
}
svg.out.WriteString("</g>\n")
svg.ypos += 2
}
func (svg *SvgWriter) GenerateSvg() []byte {
return slices.Concat(svg.header, []byte(fmt.Sprint(svg.ypos)), svg.out.Bytes(), []byte("</svg>"))
}


@@ -526,7 +526,7 @@ func main() {
log.Fatal(err)
}
var defs common.ListenDefinitions
defs := &common.RabbitMQGiteaEventsProcessor{}
var err error
if len(*basePath) == 0 {
@@ -557,7 +557,7 @@ func main() {
}
log.Println("*** Reconfiguring ***")
updateConfiguration(*configFilename, &defs.Orgs)
defs.UpdateTopics()
defs.Connection().UpdateTopics(defs)
}
}()
signal.Notify(signalChannel, syscall.SIGHUP)
@@ -573,18 +573,17 @@ func main() {
updateConfiguration(*configFilename, &defs.Orgs)
defs.GitAuthor = GitAuthor
defs.RabbitURL, err = url.Parse(*rabbitUrl)
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
if err != nil {
log.Panicf("cannot parse server URL. Err: %#v\n", err)
}
go consistencyCheckProcess()
log.Println("defs:", defs)
log.Println("defs:", *defs)
defs.Handlers = make(map[string]common.RequestProcessor)
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
log.Fatal(defs.ProcessRabbitMQEvents())
log.Fatal(common.ProcessRabbitMQEvents(defs))
}


@@ -162,9 +162,9 @@ func main() {
checker := CreateDefaultStateChecker(*checkOnStart, req, Gitea, time.Duration(*checkIntervalHours)*time.Hour)
go checker.ConsistencyCheckProcess()
listenDefs := common.ListenDefinitions{
listenDefs := &common.RabbitMQGiteaEventsProcessor{
Orgs: orgs,
GitAuthor: GitAuthor,
// GitAuthor: GitAuthor,
Handlers: map[string]common.RequestProcessor{
common.RequestType_PR: req,
common.RequestType_PRSync: req,
@@ -172,7 +172,7 @@ func main() {
common.RequestType_PRReviewRejected: req,
},
}
listenDefs.RabbitURL, _ = url.Parse(*rabbitUrl)
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
common.PanicOnError(listenDefs.ProcessRabbitMQEvents())
common.PanicOnError(common.ProcessRabbitMQEvents(listenDefs))
}


@@ -6,6 +6,7 @@ import (
"fmt"
"path"
"runtime/debug"
"slices"
"strings"
"github.com/opentracing/opentracing-go/log"
@@ -17,6 +18,33 @@ func prGitBranchNameForPR(repo string, prNo int) string {
return fmt.Sprintf("PR_%s#%d", repo, prNo)
}
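// PrjGitDescription builds the PrjGit PR title and body from the PR set: the title lists the forwarded package repositories and the body references the individual PRs and states the applicable merge policy.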
func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
title_refs := make([]string, 0, len(prset.PRs)-1)
refs := make([]string, 0, len(prset.PRs)-1)
for _, pr := range prset.PRs {
org, repo, idx := pr.PRComponents()
title_refs = append(title_refs, repo)
ref := fmt.Sprintf(common.PrPattern, org, repo, idx)
refs = append(refs, ref)
}
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, ",\n")
if prset.Config.ManualMergeOnly {
desc = desc + "\n\nManualMergeOnly enabled. To merge, 'merge ok' is required in either the project PR or every package PR."
}
if prset.Config.ManualMergeProject {
desc = desc + "\nManualMergeProject enabled. To merge, 'merge ok' is required by project maintainer in the project PR."
}
if !prset.Config.ManualMergeOnly && !prset.Config.ManualMergeProject {
desc = desc + "\nAutomatic merge enabled. This will merge when all review requirements are satisfied."
}
return
}
func verifyRepositoryConfiguration(repo *models.Repository) error {
if repo.AutodetectManualMerge && repo.AllowManualMerge {
return nil
@@ -80,46 +108,42 @@ func AllocatePRProcessor(req *common.PullRequestWebhookEvent, configs common.Aut
}
common.LogDebug("git path:", git.GetPath())
// git.GitExecOrPanic("", "config", "set", "--global", "advice.submoduleMergeConflict", "false")
// git.GitExecOrPanic("", "config", "set", "--global", "advice.mergeConflict", "false")
return &PRProcessor{
config: config,
git: git,
}, nil
}
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string, []string, error) {
prjGitPR, err := prset.GetPrjGitPR()
if err != nil {
return nil, nil, err
}
func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) error {
git := pr.git
subList, err := git.GitSubmoduleList(common.DefaultGitPrj, "HEAD")
if err != nil {
common.LogError("Error fetching submodule list for PrjGit", err)
return nil, nil, err
return err
}
refs := make([]string, 0, len(prset.PRs))
title_refs := make([]string, 0, len(prset.PRs))
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
org := pr.PR.Base.Repo.Owner.UserName
repo := pr.PR.Base.Repo.Name
idx := pr.PR.Index
org, repo, idx := pr.PRComponents()
prHead := pr.PR.Head.Sha
revert := false
if pr.PR.State != "open" {
// remove PR from PrjGit
var valid bool
if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
common.LogError("Failed fetching original submodule commit id for repo")
return nil, nil, err
prjGitPR, err := prset.GetPrjGitPR()
if prjGitPR != nil {
// remove PR from PrjGit
var valid bool
if prHead, valid = git.GitSubmoduleCommitId(common.DefaultGitPrj, repo, prjGitPR.PR.MergeBase); !valid {
common.LogError("Failed fetching original submodule commit id for repo")
return err
}
}
revert = true
}
@@ -134,9 +158,6 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string,
if revert {
commitMsg = fmt.Sprintln("auto-created for", repo, "\n\nThis commit was autocreated by", GitAuthor, "removing\n", ref)
} else {
refs = append(refs, ref)
title_refs = append(title_refs, repo)
}
updateSubmoduleInPR(submodulePath, prHead, git)
@@ -155,7 +176,7 @@ func (pr *PRProcessor) SetSubmodulesToMatchPRSet(prset *common.PRSet) ([]string,
common.LogError("Failed to find expected repo:", repo)
}
}
return title_refs, refs, nil
return nil
}
func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet) error {
@@ -166,17 +187,16 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
common.LogError("Failed to fetch PrjGit repository data.", PrjGitOrg, PrjGitRepo, err)
return err
}
remoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
RemoteName, err := git.GitClone(common.DefaultGitPrj, PrjGitBranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, remoteName+"/"+PrjGitBranch)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", "-B", prjGitPRbranch, RemoteName+"/"+PrjGitBranch)
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
title_refs, refs, err := pr.SetSubmodulesToMatchPRSet(prset)
if err != nil {
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -186,11 +206,9 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
}
if !common.IsDryRun && headCommit != newHeadCommit {
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", remoteName, "+HEAD:"+prjGitPRbranch))
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch,
"Forwarded PRs: "+strings.Join(title_refs, ", "),
fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor)+strings.Join(refs, ", "),
)
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", RemoteName, "+HEAD:"+prjGitPRbranch))
title, desc := PrjGitDescription(prset)
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
if err != nil {
common.LogError("Error creating PrjGit PR:", err)
return err
@@ -199,12 +217,40 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
RemoveDeadline: true,
})
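// Record the new PrjGit PR in the set and remember which git remote it was
// cloned from, so later steps can address <RemoteName>/<branch>.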
prinfo := prset.AddPR(pr)
prinfo.RemoteName = RemoteName
}
return nil
}
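
// RebaseAndSkipSubmoduleCommits rebases the local PrjGit branch onto
// <RemoteName>/<branch>, skipping any commit whose only conflicts are
// submodule pointer updates; on any other conflict the rebase is aborted and
// an error is returned. Roughly the equivalent of:
//
//	git rebase <remote>/<branch>
//	# while conflicts remain:
//	#   all conflicts are submodule pointers ("S...") -> git rebase --skip
//	#   anything else                                 -> git rebase --abort (error)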
func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch string) error {
git := pr.git
PrjGitPR, err := prset.GetPrjGitPR()
common.PanicOnError(err)
remoteBranch := PrjGitPR.RemoteName + "/" + branch
common.LogDebug("Rebasing on top of", remoteBranch)
for conflict := git.GitExec(common.DefaultGitPrj, "rebase", remoteBranch); conflict != nil; {
statuses, err := git.GitStatus(common.DefaultGitPrj)
if err != nil {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
common.PanicOnError(err)
}
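// Only conflicts that are pure submodule pointer updates ("S..." status) can be skipped safely.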
for _, s := range statuses {
if s.SubmoduleChanges != "S..." {
git.GitExecOrPanic(common.DefaultGitPrj, "rebase", "--abort")
return fmt.Errorf("Unexpected conflict in rebase. %s", s)
}
}
conflict = git.GitExec(common.DefaultGitPrj, "rebase", "--skip")
}
return nil
}
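
// UpdatePrjGitPR refreshes an existing PrjGit pull request: it clones the PR
// branch, rebases it onto the target branch when Gitea reports it as not
// mergeable, re-points the submodules to match the PR set, then pushes the
// result (forced after a rebase) and updates the PR title/body from
// PrjGitDescription when the branch head actually changed.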
func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
_, _, PrjGitBranch := prset.Config.GetPrjGit()
PrjGitPR, err := prset.GetPrjGitPR()
if err != nil {
common.LogError("Updating PrjGitPR but not found?", err)
@@ -215,16 +261,23 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
PrjGit := PrjGitPR.PR.Base.Repo
prjGitPRbranch := PrjGitPR.PR.Head.Name
PrjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, prjGitPRbranch, PrjGit.SSHURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitBranch)
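// If Gitea reports the PR as not mergeable, rebase it onto the target branch
// first; the rewritten history then requires a forced push below.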
forcePush := false
// trust Gitea here on mergeability
if !PrjGitPR.PR.Mergeable {
common.PanicOnError(pr.RebaseAndSkipSubmoduleCommits(prset, PrjGitBranch))
forcePush = true
}
headCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
if err != nil {
common.LogError("Failed to fetch PrjGit branch", prjGitPRbranch, err)
return err
}
if err := pr.SetSubmodulesToMatchPRSet(prset); err != nil {
return err
}
newHeadCommit, err := git.GitBranchHead(common.DefaultGitPrj, prjGitPRbranch)
@@ -234,12 +287,14 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
}
if !common.IsDryRun && headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
// update PR
PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
@@ -285,9 +340,7 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
common.LogInfo("PR State is closed:", prjGitPR.PR.State)
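// The PrjGit PR is no longer open, so close every remaining open package PR in the set.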
for _, pr := range prset.PRs {
if pr.PR.State == "open" {
org, repo, idx := pr.PRComponents()
Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
State: "closed",
})
@@ -296,6 +349,14 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
return nil
}
if len(prset.PRs) > 1 {
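// NOTE: this loop currently has no effect beyond skipping the PrjGit PR; it
// reads like a work-in-progress placeholder.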
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
}
}
}
if err = pr.UpdatePrjGitPR(prset); err != nil {
return err
}
@@ -314,13 +375,10 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
// make sure that PrjGit is consistent and that only submodules that are meant to be *updated* change
// reset anything else that changed and is not part of the prset
// package removals/additions are *not* counted here
org, repo, branch := config.GetPrjGit()
if pr, err := prset.GetPrjGitPR(); err == nil {
remote, err := git.GitClone(common.DefaultGitPrj, prjGitPRbranch, pr.PR.Base.Repo.CloneURL)
common.PanicOnError(err)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", remote, pr.PR.MergeBase, pr.PR.Head.Ref)
common.LogDebug("Fetch done")
common.LogDebug("Submodule parse begin")
orig_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.RemoteName+"/"+branch) // merge base must be the remote branch; verified in the PrjGit update
common.PanicOnError(err)
new_subs, err := git.GitSubmoduleList(common.DefaultGitPrj, pr.PR.Head.Sha)
common.PanicOnError(err)
@@ -357,7 +415,6 @@ func (pr *PRProcessor) Process(req *common.PullRequestWebhookEvent) error {
}
common.LogDebug(" num of reviewers:", len(prjGitPR.PR.RequestedReviewers))
maintainers, err := common.FetchProjectMaintainershipData(Gitea, org, repo, branch)
if err != nil {
return err