11 Commits

SHA256 Message Date
f2089f99fc staging: use helper function to SetCommitStatus 2025-09-09 12:55:14 +02:00
10ea3a8f8f obs-staging-bot: Fix setting of commit status 2025-09-09 12:46:43 +02:00
9faa6ead49 Log errors on SetCommitStatus 2025-09-09 12:46:21 +02:00
29cce5741a staging: typo fix 2025-09-09 12:46:11 +02:00
804e542c3f Decline too large staging projects
In most cases this indicates an error in the pull request anyway.
2025-09-09 12:41:07 +02:00
72899162b0 status: need to fetch repositories during sync
We need to fetch repositories so that we can have package
data. We only need to fetch one set of results per project,
not all repos.
2025-09-03 16:42:01 +02:00
168a419bbe status: allow for package search endpoint
OBS has issues searching for packages in scmsynced projects.
Since we have a list of all the repositories, we can allow
for a search endpoint here.

/search?q=term1&q=term2...

The result is returned as a JSON array:

[
   "project1/pkgA",
   "project2/pkgB"
]
2025-09-03 14:35:15 +02:00
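
As a usage sketch (not part of the change itself): a minimal Go client for the new endpoint could look like the following. The service address and the sample result are assumptions, and the handler added in this changeset accepts exactly one q parameter per request.

    package main

    import (
        "encoding/json"
        "fmt"
        "net/http"
        "net/url"
    )

    func main() {
        // One "q" value per request; the handler rejects zero or multiple values.
        resp, err := http.Get("http://localhost:8080/search?q=" + url.QueryEscape("vim"))
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()

        var hits []string // e.g. ["openSUSE:Factory/vim"] (hypothetical result)
        if err := json.NewDecoder(resp.Body).Decode(&hits); err != nil {
            panic(err)
        }
        fmt.Println(hits)
    }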
6a71641295 common: take care of empty result sets
In case of empty result pages, we should ignore the X-Total-Count
header.

Fixes: 5addde0a71
2025-09-03 12:21:07 +02:00
5addde0a71 common: use X-Total-Count in multi-page results 2025-09-03 01:00:33 +02:00
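
Taken together, 5addde0a71 and 6a71641295 make every pagination loop follow the same shape: stop on an empty page (and ignore the header in that case), otherwise keep fetching until the collected items reach the X-Total-Count value. A minimal standalone sketch of that loop, where fetchPage is a hypothetical stand-in for one API call plus the parsed header (-1 when absent):

    package main

    import "fmt"

    // collectAll sketches the pagination pattern: break on an empty page,
    // otherwise stop once we hold at least X-Total-Count items.
    func collectAll(fetchPage func(page int64) ([]string, int, error)) ([]string, error) {
        var all []string
        for page := int64(1); ; page++ {
            items, total, err := fetchPage(page)
            if err != nil {
                return nil, err
            }
            if len(items) == 0 {
                break // empty page: ignore whatever X-Total-Count said
            }
            all = append(all, items...)
            if total >= 0 && len(all) >= total {
                break // we already hold everything the header promised
            }
        }
        return all, nil
    }

    func main() {
        data := []string{"a", "b", "c", "d", "e"}
        page := func(p int64) ([]string, int, error) {
            lo, hi := (p-1)*2, p*2
            if lo > int64(len(data)) {
                return nil, len(data), nil
            }
            if hi > int64(len(data)) {
                hi = int64(len(data))
            }
            return data[lo:hi], len(data), nil
        }
        all, _ := collectAll(page)
        fmt.Println(all) // [a b c d e]
    }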
90ea1c9463 common: remove duplicate 2025-09-02 20:50:23 +02:00
a4fb3e6151 PR: Don't clobber other's PrjGit description
If we did not create the PrjGit PR, don't touch the title
and description.

Closes: #68
2025-09-02 19:47:47 +02:00
5 changed files with 183 additions and 45 deletions

View File

@@ -24,11 +24,13 @@ import (
 	"fmt"
 	"io"
 	"log"
+	"net/http"
 	"net/url"
 	"os"
 	"path"
 	"path/filepath"
 	"slices"
+	"strconv"
 	"time"
 	transport "github.com/go-openapi/runtime/client"
@@ -182,7 +184,6 @@ type Gitea interface {
 	GiteaCommitStatusGetter
 	GiteaCommitStatusSetter
 	GiteaSetRepoOptions
-	GiteaTimelineFetcher
 	GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error)
 	GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error)
@@ -199,7 +200,32 @@ type Gitea interface {
 	GetCurrentUser() (*models.User, error)
 }
+type GiteaHeaderInterceptor struct {
+	Length int
+	http.RoundTripper
+}
+func (i *GiteaHeaderInterceptor) RoundTrip(req *http.Request) (*http.Response, error) {
+	resp, err := i.RoundTripper.RoundTrip(req)
+	if err != nil {
+		return nil, err
+	}
+	count_header := resp.Header["X-Total-Count"]
+	if len(count_header) == 1 {
+		i.Length, err = strconv.Atoi(resp.Header["X-Total-Count"][0])
+		if err != nil {
+			LogError("Converting X-Total-Count response header error", err)
+			i.Length = -1
+			return nil, err
+		}
+	} else {
+		i.Length = -1
+	}
+	return resp, nil
+}
 type GiteaTransport struct {
+	headers   *GiteaHeaderInterceptor
 	transport *transport.Runtime
 	client    *apiclient.GiteaAPI
 }
@@ -212,7 +238,9 @@ func AllocateGiteaTransport(giteaUrl string) Gitea {
 		log.Panicln("Failed to parse gitea url:", err)
 	}
+	r.headers = &GiteaHeaderInterceptor{RoundTripper: http.DefaultTransport}
 	r.transport = transport.New(url.Host, apiclient.DefaultBasePath, [](string){url.Scheme})
+	r.transport.Transport = r.headers
 	r.transport.DefaultAuthentication = transport.BearerToken(giteaToken)
 	r.client = apiclient.New(r.transport, nil)
@@ -287,10 +315,9 @@ func (gitea *GiteaTransport) ManualMergePR(org, repo string, num int64, commitid
 }
 func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRequest, error) {
-	var page, limit int64
+	var page int64
 	prs := make([]*models.PullRequest, 0)
-	limit = 20
 	state := "open"
 	for {
@@ -302,16 +329,18 @@ func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRe
 			WithOwner(org).
 			WithRepo(repo).
 			WithState(&state).
-			WithPage(&page).
-			WithLimit(&limit),
+			WithPage(&page),
 			gitea.transport.DefaultAuthentication)
 		if err != nil {
 			return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", org, repo, err)
 		}
+		if len(req.Payload) == 0 {
+			break
+		}
 		prs = slices.Concat(prs, req.Payload)
-		if len(req.Payload) < int(limit) {
+		if len(prs) >= gitea.headers.Length {
 			break
 		}
 	}
@@ -320,21 +349,23 @@ func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRe
 }
 func (gitea *GiteaTransport) GetCommitStatus(org, repo, hash string) ([]*models.CommitStatus, error) {
-	page := int64(1)
-	limit := int64(10)
+	var page int64
 	var res []*models.CommitStatus
 	for {
+		page++
 		r, err := gitea.client.Repository.RepoListStatuses(
-			repository.NewRepoListStatusesParams().WithDefaults().WithOwner(org).WithRepo(repo).WithSha(hash).WithPage(&page).WithLimit(&limit),
+			repository.NewRepoListStatusesParams().WithDefaults().WithOwner(org).WithRepo(repo).WithSha(hash).WithPage(&page),
 			gitea.transport.DefaultAuthentication)
 		if err != nil {
 			return res, err
 		}
+		if len(r.Payload) == 0 {
+			break
+		}
 		res = append(res, r.Payload...)
-		if len(r.Payload) < int(limit) {
+		if len(res) >= gitea.headers.Length {
 			break
 		}
 	}
@@ -377,19 +408,18 @@ func (gitea *GiteaTransport) GetRepository(org, pkg string) (*models.Repository,
 }
 func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum int64) ([]*models.PullReview, error) {
-	limit := int64(20)
 	var page int64
 	var allReviews []*models.PullReview
 	for {
+		page++
 		reviews, err := gitea.client.Repository.RepoListPullReviews(
 			repository.NewRepoListPullReviewsParams().
 				WithDefaults().
 				WithOwner(org).
 				WithRepo(project).
 				WithIndex(PRnum).
-				WithPage(&page).
-				WithLimit(&limit),
+				WithPage(&page),
 			gitea.transport.DefaultAuthentication,
 		)
@@ -397,11 +427,13 @@ func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum in
 			return nil, err
 		}
-		allReviews = slices.Concat(allReviews, reviews.Payload)
-		if len(reviews.Payload) < int(limit) {
+		if len(reviews.Payload) == 0 {
+			break
+		}
+		allReviews = slices.Concat(allReviews, reviews.Payload)
+		if len(allReviews) >= gitea.headers.Length {
 			break
 		}
-		page++
 	}
 	return allReviews, nil
@@ -469,7 +501,6 @@ const (
 )
 func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error) {
-	bigLimit := int64(20)
 	ret := make([]*models.NotificationThread, 0, 100)
 	for page := int64(1); ; page++ {
@@ -477,7 +508,6 @@ func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]
 			WithDefaults().
 			WithSubjectType([]string{Type}).
 			WithStatusTypes([]string{"unread"}).
-			WithLimit(&bigLimit).
 			WithPage(&page)
 		if since != nil {
@@ -490,8 +520,11 @@ func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]
 			return nil, err
 		}
+		if len(list.Payload) == 0 {
+			break
+		}
 		ret = slices.Concat(ret, list.Payload)
-		if len(list.Payload) < int(bigLimit) {
+		if len(ret) >= gitea.headers.Length {
 			break
 		}
 	}
@@ -500,7 +533,6 @@ func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]
 }
 func (gitea *GiteaTransport) GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error) {
-	limit := int64(20)
 	t := true
 	if page <= 0 {
@@ -511,7 +543,6 @@ func (gitea *GiteaTransport) GetDoneNotifications(Type string, page int64) ([]*m
 			WithAll(&t).
 			WithSubjectType([]string{Type}).
 			WithStatusTypes([]string{"read"}).
-			WithLimit(&limit).
 			WithPage(&page),
 		gitea.transport.DefaultAuthentication)
 	if err != nil {
@@ -564,9 +595,12 @@ func (gitea *GiteaTransport) GetOrganizationRepositories(orgName string) ([]*mod
 		if len(ret.Payload) == 0 {
 			break
 		}
 		repos = append(repos, ret.Payload...)
 		page++
+		if len(repos) >= gitea.headers.Length {
+			break
+		}
 	}
 	return repos, nil
@@ -780,15 +814,18 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
 		resCount = len(res.Payload)
 		LogDebug("page:", page, "len:", resCount)
+		if resCount == 0 {
+			break
+		}
 		page++
-		for _, d := range res.Payload {
-			if d != nil {
-				retData = append(retData, d)
-			}
-		}
+		retData = append(retData, res.Payload...)
+		if len(retData) >= gitea.headers.Length {
+			break
+		}
 	}
 	LogDebug("total results:", len(retData))
+	retData = slices.DeleteFunc(retData, func(a *models.TimelineComment) bool { return a == nil })
 	slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
 		return time.Time(b.Created).Compare(time.Time(a.Created))
 	})
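
The GiteaHeaderInterceptor above is an instance of the standard http.RoundTripper wrapping pattern. A plain net/http sketch of the same idea, independent of the go-openapi runtime used here (the URL below is a placeholder):

    package main

    import (
        "net/http"
        "strconv"
    )

    // totalCountInterceptor mirrors the idea: wrap a RoundTripper and record
    // X-Total-Count from every response as a side channel for pagination.
    type totalCountInterceptor struct {
        Total int
        next  http.RoundTripper
    }

    func (t *totalCountInterceptor) RoundTrip(req *http.Request) (*http.Response, error) {
        resp, err := t.next.RoundTrip(req)
        if err != nil {
            return nil, err
        }
        t.Total = -1 // default when the header is missing or unparsable
        if v := resp.Header.Get("X-Total-Count"); v != "" {
            if n, err := strconv.Atoi(v); err == nil {
                t.Total = n
            }
        }
        return resp, nil
    }

    func main() {
        meta := &totalCountInterceptor{next: http.DefaultTransport}
        client := &http.Client{Transport: meta}
        // Any paginated API that sets X-Total-Count works; this URL is illustrative only.
        if resp, err := client.Get("https://example.com/api/v1/repos/org/repo/pulls?page=1"); err == nil {
            resp.Body.Close()
        }
        _ = meta.Total
    }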

View File

@@ -322,10 +322,13 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
 		urlPkg = append(urlPkg, "onlybuild="+url.QueryEscape(pkg))
 	}
 	meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
+	if len(meta.ScmSync) >= 65535 {
+		return nil, errors.New("Reached max amount of package changes per request")
+	}
 	meta.Title = fmt.Sprintf("PR#%d to %s", pr.Index, pr.Base.Name)
 	// QE wants it published ... also we should not hardcode it here, since
 	// it is configurable via the :PullRequest project
 	// meta.PublicFlags = common.Flags{Contents: "<disable/>"}
 	meta.Groups = nil
 	meta.Persons = nil
@@ -633,6 +636,14 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
 	return false // cleaned up now, but the cleanup was not aleady done
 }
+func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.CommitStatus) error {
+	_, err := gitea.SetCommitStatus(org, repo, hash, status)
+	if err != nil {
+		common.LogError(err)
+	}
+	return err
+}
 func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
 	dir, err := os.MkdirTemp(os.TempDir(), BotName)
 	common.PanicOnError(err)
@@ -837,6 +848,22 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
 		TargetURL: ObsWebHost + "/project/show/" + stagingProject,
 	}
+	if err != nil {
+		msg := "Unable to setup stage project " + stagingConfig.ObsProject
+		status.Status = common.CommitStatus_Fail
+		common.LogError(msg)
+		if !IsDryRun {
+			SetStatus(gitea, org, repo, pr.Head.Sha, status)
+			_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, msg)
+			if err != nil {
+				common.LogError(err)
+			} else {
+				return true, nil
+			}
+		}
+		return false, nil
+	}
 	msg := "Changed source updated for build"
 	if change == RequestModificationProjectCreated {
 		msg = "Build is started in " + ObsWebHost + "/project/show/" +
@@ -845,8 +872,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
 	if len(stagingConfig.QA) > 0 {
 		msg = msg + "\nAdditional QA builds: \n"
 	}
-	gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
+	SetStatus(gitea, org, repo, pr.Head.Sha, status)
 	for _, setup := range stagingConfig.QA {
 		CreateQASubProject(stagingConfig, git, gitea, pr,
 			stagingProject,
@@ -870,32 +896,34 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
 		}
 		buildStatus := ProcessBuildStatus(stagingResult, baseResult)
+		done := false
 		switch buildStatus {
 		case BuildStatusSummarySuccess:
 			status.Status = common.CommitStatus_Success
+			done = true
 			if !IsDryRun {
 				_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, "Build successful")
 				if err != nil {
 					common.LogError(err)
-				} else {
-					return true, nil
 				}
 			}
 		case BuildStatusSummaryFailed:
 			status.Status = common.CommitStatus_Fail
+			done = true
 			if !IsDryRun {
 				_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Build failed")
 				if err != nil {
 					common.LogError(err)
-				} else {
-					return true, nil
 				}
 			}
 		}
 		common.LogInfo("Build status:", buildStatus)
-		gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
-		// waiting for build results -- nothing to do
+		if !IsDryRun {
+			if err = SetStatus(gitea, org, repo, pr.Head.Sha, status); err != nil {
+				return false, err
+			}
+		}
+		return done, nil
 	} else if err == NonActionableReviewError || err == NoReviewsFoundError {
 		return true, nil

View File

@@ -20,6 +20,7 @@ package main
 import (
 	"bytes"
+	"encoding/json"
 	"flag"
 	"fmt"
 	"io"
@@ -268,6 +269,33 @@ func main() {
 		res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
 	})
+	http.HandleFunc("GET /search", func(res http.ResponseWriter, req *http.Request) {
+		common.LogInfo("GET /serach?" + req.URL.RawQuery)
+		queries := req.URL.Query()
+		if !queries.Has("q") {
+			res.WriteHeader(400)
+			return
+		}
+		names := queries["q"]
+		if len(names) != 1 {
+			res.WriteHeader(400)
+			return
+		}
+		packages := FindPackages(names[0])
+		data, err := json.MarshalIndent(packages, "", " ")
+		if err != nil {
+			res.WriteHeader(500)
+			common.LogError("Error in marshalling data.", err)
+			return
+		}
+		res.Write(data)
+		res.Header().Add("content-type", "application/json")
+		res.WriteHeader(200)
+	})
 	http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
 		prj := req.PathValue("Project")
 		pkg := req.PathValue("Package")

View File

@@ -29,13 +29,15 @@ func UpdateResults(r *common.BuildResult) {
 	RepoStatusLock.Lock()
 	defer RepoStatusLock.Unlock()
+	updateResultsWithoutLocking(r)
+}
+func updateResultsWithoutLocking(r *common.BuildResult) {
 	key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
+	common.LogDebug(" + Updating", key)
 	data, err := redisClient.HGetAll(context.Background(), key).Result()
 	if err != nil {
 		common.LogError("Failed fetching build results for", key, err)
 	}
+	common.LogDebug(" + Update size", len(data))
 	reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
 	for _, pkg := range r.Status {
@@ -110,6 +112,27 @@ func FindRepoResults(project, repo string) []*common.BuildResult {
 	return ret
 }
+func FindPackages(pkg string) []string {
+	RepoStatusLock.RLock()
+	defer RepoStatusLock.RUnlock()
+	data := make([]string, 0, 100)
+	for _, repo := range RepoStatus {
+		for _, status := range repo.Status {
+			if pkg == status.Package {
+				entry := repo.Project + "/" + pkg
+				if idx, found := slices.BinarySearch(data, entry); !found {
+					data = slices.Insert(data, idx, entry)
+					if len(data) >= 100 {
+						return data
+					}
+				}
+			}
+		}
+	}
+	return data
+}
 func FindAndUpdateProjectResults(project string) []*common.BuildResult {
 	res := FindProjectResults(project)
 	wg := &sync.WaitGroup{}
@@ -161,6 +184,8 @@ func RescanRepositories() error {
 	RepoStatusLock.Unlock()
 	var count int
+	projectsLooked := make([]string, 0, 10000)
 	for {
 		var data []string
 		data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
@@ -169,6 +194,7 @@ func RescanRepositories() error {
 			return err
 		}
+		wg := &sync.WaitGroup{}
 		RepoStatusLock.Lock()
 		for _, repo := range data {
 			r := strings.Split(repo, "/")
@@ -180,14 +206,28 @@ func RescanRepositories() error {
 				Repository: r[1],
 				Arch:       r[2],
 			}
-			if pos, found := slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
+			var pos int
+			var found bool
+			if pos, found = slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
 				RepoStatus[pos].Dirty = true
 			} else {
 				d.Dirty = true
 				RepoStatus = slices.Insert(RepoStatus, pos, d)
 				count++
 			}
+			// fetch all keys, one per non-maintenance/non-home: projects, for package search
+			if idx, found := slices.BinarySearch(projectsLooked, d.Project); !found && !strings.Contains(d.Project, ":Maintenance:") && (len(d.Project) < 5 || d.Project[0:5] != "home:") {
+				projectsLooked = slices.Insert(projectsLooked, idx, d.Project)
+				wg.Add(1)
+				go func(r *common.BuildResult) {
+					updateResultsWithoutLocking(r)
+					wg.Done()
+				}(RepoStatus[pos])
+			}
 		}
+		wg.Wait()
 		RepoStatusLock.Unlock()
 		if cursor == 0 {
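
Both FindPackages and the projectsLooked bookkeeping above rely on the same sorted-slice-as-set idiom from the standard slices package. In isolation it looks like this:

    package main

    import (
        "fmt"
        "slices"
    )

    // addUnique keeps set sorted and free of duplicates: BinarySearch yields the
    // insertion index, Insert places the item only when it is not already present.
    func addUnique(set []string, item string) []string {
        if idx, found := slices.BinarySearch(set, item); !found {
            set = slices.Insert(set, idx, item)
        }
        return set
    }

    func main() {
        seen := []string{}
        for _, prj := range []string{"openSUSE:Factory", "Devel:Go", "openSUSE:Factory"} {
            seen = addUnique(seen, prj)
        }
        fmt.Println(seen) // [Devel:Go openSUSE:Factory]
    }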

View File

@@ -317,9 +317,14 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
 	}
 	PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
-	if PrjGitPR.PR.Title != PrjGitTitle || PrjGitPR.PR.Body != PrjGitBody {
-		common.LogDebug("New title:", PrjGitTitle)
-		common.LogDebug(PrjGitBody)
+	if PrjGitPR.PR.User.UserName == CurrentUser.UserName {
+		if PrjGitPR.PR.Title != PrjGitTitle || PrjGitPR.PR.Body != PrjGitBody {
+			common.LogDebug("New title:", PrjGitTitle)
+			common.LogDebug(PrjGitBody)
+		}
+	} else {
+		// TODO: find our first comment in timeline
 	}
 	if !common.IsDryRun {