71 Commits

Author SHA256 Message Date
51b3d5942d wip 2025-10-13 09:02:06 +02:00
04825b552e pr: use force-merge instead of force-push
The permission is to accept a change without required reviews, not
to actually force-push

Fixes 7bad8eb5a9
2025-10-12 10:22:49 +02:00
ca7966f3e0 pr: sanity check
make sure that the checked out PR matches what Gitea is sending
us, otherwise pause for a few seconds and retry.
2025-10-11 18:10:15 +02:00
0c47ca4d32 pr: updating PR needs to update the SHA
If we are updating a Project Git PR, we need to save the updated
hash or we may be looking at pre-update PR for various operations,
including merging.

This mostly only affects project gits in devel projects where
the project git could be updated by direct workflow bot, but then
the project is incorrectly resulting in no package update.
2025-10-11 17:25:48 +02:00
7bad8eb5a9 pr: Add config definitions for permission set 2025-10-09 18:43:56 +02:00
c2c60b77e5 use autogits package prefix 2025-10-08 13:03:06 +02:00
76b5a5dc0d import: factory hash setting utility 2025-10-07 22:55:58 +02:00
58da491049 common: handle translation to SSH if already SSH 2025-10-07 17:26:27 +02:00
626bead304 status: improve request logging 2025-10-06 14:07:35 +02:00
30bac996f4 status: redundant entry in service file 2025-10-06 14:03:38 +02:00
9adc718b6f spec: hujson moved to utils subpackage 2025-10-06 13:52:39 +02:00
070f45bc25 status: add no lock function
Locking is not re-entrant, so these are useful if we need
to find things while we already lock the structures
2025-10-06 13:49:19 +02:00
d061f29699 status: use env as parameters to service
Instead of having to rewrite the service file with parameters,
leverage Env file to pass default parameters values.
2025-10-06 13:49:12 +02:00
f6fd96881d staging: improve docs 2025-10-02 17:40:00 +02:00
2be785676a reparent: add readme 2025-10-02 17:05:34 +02:00
1b9ee2d46a PR: ref requires PR fetch, and not in timeline 2025-10-02 15:13:43 +02:00
b7bbafacf8 PR: limit search to bot account for ProjectGit PRs 2025-10-02 13:45:31 +02:00
240896f101 status: fix delete function logic 2025-10-01 19:35:43 +02:00
a7b326fceb status: limit results to specific packages 2025-10-01 19:28:47 +02:00
76ed03f86f status: add json output support
if Accept: application/json is present, return JSON output
of build results instead of SVG
2025-10-01 18:58:08 +02:00
1af2f53755 PR: Fix case where PR repo != target repo
Was using a check that the label has the repo name in it, but
this is not always reliable. So, check repo.ID if it's the same.
2025-10-01 15:33:39 +02:00
0de9071f92 group-review: we need to clone before modifying a slice 2025-09-30 17:27:36 +02:00
855faea659 imported devel:openSUSE:Factory 2025-09-29 15:10:25 +02:00
dbd581ffef import: packages are not just in factory
Some packages share names of openSUSE:Factory packages but actually
have nothing in common with them. So before importing the Factory
package, check if the package is actually a devel project for Factory
and only proceed if it is. Otherwise, assume that the devel
project package is independent.
2025-09-29 15:08:30 +02:00
1390225614 PR: list missing PRs in the logs 2025-09-29 14:58:43 +02:00
a03491f75c Keep maintainers from staging template project
They need to keep access as they might need to be able to modify the
stage project. They could grant access anyway, by adding themselves
as they own the upper project. No reason to force them the
extra trip or to hide build results first to them
2025-09-24 10:39:07 +02:00
2092fc4f42 Fix handling of all project flags
We skipped access and sourceaccess flags before
2025-09-24 09:33:29 +02:00
d2973f4792 PR: only consider open PR when creating new PRs 2025-09-21 23:21:40 +02:00
58022c6edc update transition project list 2025-09-21 20:21:15 +02:00
994e6b3ca2 status: fix typo regression 2025-09-18 19:18:03 +02:00
6414336ee6 status: add basic project level build results 2025-09-18 19:05:35 +02:00
1104581eb6 status: superfluous Sprintf 2025-09-18 16:50:59 +02:00
6ad110e5d3 status: escape strings 2025-09-18 16:30:34 +02:00
e39ce302b8 status: fix README 2025-09-18 13:07:08 +02:00
3f216dc275 docs: improve README 2025-09-16 22:30:18 +02:00
8af7e58534 common: handle group data in PR reviews 2025-09-16 18:13:35 +02:00
043673d9ac common: handle ReviewGroup in maintainership data
ReviewGroups can be added as maintainers and can be optionally
expanded. This is handy when a ReviewGroup is a project maintainer
2025-09-16 17:40:18 +02:00
73737be16a rabbitmq: add support for forwarding status events to Rabbit 2025-09-16 13:23:43 +02:00
1d3ed81ac5 staging: use https cloning always
We can just pass the token, so SSH option for cloning is obsolete
2025-09-16 10:11:28 +02:00
49c4784e70 staging: handle case of no staging config 2025-09-15 17:58:21 +02:00
be15c86973 staging: use correct gitea token 2025-09-15 17:44:52 +02:00
72857db561 staging: add gitea token as user for cloning
For private repos, we need to identify ourselves to Gitea
2025-09-15 17:28:38 +02:00
faf53aaae2 move staging-bot env file to /etc/default 2025-09-15 14:00:56 +02:00
9e058101f0 Merge commit '4ae45d9913dcd473fc931c27dcc5dee93c70723121bf6f77c256c7dd196ea768' of src.opensuse.org:adamm/autogits 2025-09-15 13:54:46 +02:00
Elisei Roca
4ae45d9913 Look for configuration file in /etc/default
Files in /etc/sysconfig/ should have all the sysconfig headers and so on.
2025-09-15 13:24:18 +02:00
56cf8293ed staging: clone via target repo only 2025-09-15 12:30:08 +02:00
fd5b3598bf Don't crash when new packages got added
The build result request of the base project is failing in this
situation, since the requested package does not exist.

Therefore we need to have separate lists for proper handling.
2025-09-13 15:47:50 +02:00
9dd5a57b81 staging: fix no config case 2025-09-11 17:22:02 +02:00
1cd385e227 common: handle case of non-existing config file 2025-09-11 16:56:03 +02:00
3c20eb567b staging: allow wider character set in pr regex 2025-09-11 16:09:42 +02:00
ff7df44d37 staging: assume changed directories are packages
Ignore any non-top level directories here. This should be fixed
to handle _manifest files
2025-09-11 14:56:27 +02:00
1a19873f77 Merge remote-tracking branch 'gitea/main' 2025-09-11 09:35:05 +02:00
6a09bf021e Revert "common: use X-Total-Count in multi-page results"
This reverts commit 5addde0a71.
2025-09-11 09:34:13 +02:00
f2089f99fc staging: use helper function to SetCommitStatus 2025-09-09 12:55:14 +02:00
10ea3a8f8f obs-staging-bot: Fix setting of commit status 2025-09-09 12:46:43 +02:00
9faa6ead49 Log errors on SetCommitStatus 2025-09-09 12:46:21 +02:00
29cce5741a staging: typo fix 2025-09-09 12:46:11 +02:00
804e542c3f Decline too large staging projects
In most cases anyway an error in pull request.
2025-09-09 12:41:07 +02:00
72899162b0 status: need to fetch repositories during sync
We need to fetch repositories so that we can have package
data. We only need to fetch one set of results per project,
not all repos.
2025-09-03 16:42:01 +02:00
168a419bbe status: allow for package search endpoint
OBS has issues searching for packages in scmsynced projects.
Since we have a list of all the repositories, we can allow
for a search endpoint here.

/search?q=term1&q=term2...

results is JSON

[
   project1/pkgA,
   project2/pkgB
]
2025-09-03 14:35:15 +02:00
6a71641295 common: take care of empty result sets
In case of empty result pages, we should ignore the X-Total-Count
header.

Fixes: 5addde0a71
2025-09-03 12:21:07 +02:00
5addde0a71 common: use X-Total-Count in multi-page results 2025-09-03 01:00:33 +02:00
90ea1c9463 common: remove duplicate 2025-09-02 20:50:23 +02:00
a4fb3e6151 PR: Don't clobber other's PrjGit description
If we did not create the PrjGit PR, don't touch the title
and description

Closes: #68
2025-09-02 19:47:47 +02:00
e2abbfcc63 staging: improve cleanup logging 2025-09-01 12:49:55 +02:00
f6cb35acca spec: add obs-staging-bot.service 2025-09-01 12:29:29 +02:00
f4386c3d12 Try to use Staging Master Project as default build target if available
This allows us to set custom build configuration or repository sets for
pull request projects.
2025-09-01 11:52:30 +02:00
f8594af8c6 obs-status: report error on monitor page if error
If we have error with REDIS connection, report it as error 500
on the / default page. Otherwise, report the 404 there instead
as before.
2025-09-01 11:20:54 +02:00
b8ef69a5a7 group-review: react on comment events
Instead of just polling for events, we can use issue_comment events
to process PRs more quickly.

At same time increased default polling interval to 10 minutes if
we use events

Closes #67
2025-08-30 10:41:29 +02:00
c980b9f84d group-review: improve comment made by the bot
Bot name should be expanded for easy copy-pasta
2025-08-29 18:19:03 +02:00
4651440457 Revert "Fixing creation or PR even when we don't want it"
This reverts commit e90ba95869.

We need to assign reviews anyway...
2025-08-29 17:09:08 +02:00
41 changed files with 1729 additions and 312 deletions

View File

@@ -5,11 +5,15 @@ The bots that drive Git Workflow for package management
* devel-importer -- helper to import an OBS devel project into a Gitea organization
* gitea-events-rabbitmq-publisher -- takes all events from a Gitea organization (webhook) and publishes it on a RabbitMQ instance
* gitea-status-proxy -- allows bots without code owner permission to set Gitea's commit status
* group-review -- group review proxy
* hujson -- translates JWCC (json with commas and comments) to Standard JSON
* obs-forward-bot -- forwards PR as OBS sr (TODO)
* obs-staging-bot -- build bot for a PR
* obs-status-service -- report build status of an OBS project as an SVG
* workflow-pr -- keeps PR to _ObsPrj consistent with a PR to a package update
* workflow-direct -- update _ObsPrj based on direct pushes and repo creations/removals from organization
* staging-utils -- review tooling for PR
* staging-utils -- review tooling for PR (TODO)
- list PR
- merge PR
- split PR

View File

@@ -30,61 +30,63 @@ BuildRequires: go
Git Workflow tooling and utilities enabling automated handing of OBS projects
as git repositories
%package -n hujson
%package utils
Summary: HuJSON to JSON parser
Provides: hujson
Provides: /usr/bin/hujson
%description -n hujson
%description utils
HuJSON to JSON parser, using stdin -> stdout pipe
%package -n gitea-events-rabbitmq-publisher
%package gitea-events-rabbitmq-publisher
Summary: Publishes Gitea webhook data via RabbitMQ
%description -n gitea-events-rabbitmq-publisher
%description gitea-events-rabbitmq-publisher
Listens on an HTTP socket and publishes Gitea events on a RabbitMQ instance
with a topic
<scope>.src.$organization.$webhook_type.[$webhook_action_type]
%package -n doc
%package doc
Summary: Common documentation files
%description -n doc
%description doc
Common documentation files
%package -n group-review
%package group-review
Summary: Reviews of groups defined in ProjectGit
%description -n group-review
%description group-review
Is used to handle reviews associated with groups defined in the
ProjectGit.
%package -n obs-staging-bot
%package obs-staging-bot
Summary: Build a PR against a ProjectGit, if review is requested
%description -n obs-staging-bot
%description obs-staging-bot
Build a PR against a ProjectGit, if review is requested.
%package -n obs-status-service
%package obs-status-service
Summary: Reports build status of OBS service as an easily to produce SVG
%description -n obs-status-service
%description obs-status-service
Reports build status of OBS service as an easily to produce SVG
%package -n workflow-direct
%package workflow-direct
Summary: Keep ProjectGit in sync for a devel project
%description -n workflow-direct
%description workflow-direct
Keep ProjectGit in sync with packages in the organization of a devel project
%package -n workflow-pr
%package workflow-pr
Summary: Keeps ProjectGit PR in-sync with a PackageGit PR
%description -n workflow-pr
%description workflow-pr
Keeps ProjectGit PR in-sync with a PackageGit PR
@@ -94,7 +96,7 @@ cp -r /home/abuild/rpmbuild/SOURCES/* ./
%build
go build \
-C hujson \
-C utils/hujson \
-buildmode=pie
go build \
-C gitea-events-rabbitmq-publisher \
@@ -120,59 +122,87 @@ install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publishe
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0755 hujson/hujson %{buildroot}%{_bindir}/hujson
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
%pre -n gitea-events-rabbitmq-publisher
%pre gitea-events-rabbitmq-publisher
%service_add_pre gitea-events-rabbitmq-publisher.service
%post -n gitea-events-rabbitmq-publisher
%post gitea-events-rabbitmq-publisher
%service_add_post gitea-events-rabbitmq-publisher.service
%preun -n gitea-events-rabbitmq-publisher
%preun gitea-events-rabbitmq-publisher
%service_del_preun gitea-events-rabbitmq-publisher.service
%postun -n gitea-events-rabbitmq-publisher
%postun gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service
%files -n gitea-events-rabbitmq-publisher
%pre obs-staging-bot
%service_add_pre obs-staging-bot.service
%post obs-staging-bot
%service_add_post obs-staging-bot.service
%preun obs-staging-bot
%service_del_preun obs-staging-bot.service
%postun obs-staging-bot
%service_del_postun obs-staging-bot.service
%pre obs-status-service
%service_add_pre obs-status-service.service
%post obs-status-service
%service_add_post obs-status-service.service
%preun obs-status-service
%service_del_preun obs-status-service.service
%postun obs-status-service
%service_del_postun obs-status-service.service
%files gitea-events-rabbitmq-publisher
%license COPYING
%doc gitea-events-rabbitmq-publisher/README.md
%{_bindir}/gitea-events-rabbitmq-publisher
%{_unitdir}/gitea-events-rabbitmq-publisher.service
%files -n doc
%files doc
%license COPYING
%doc doc/README.md
%doc doc/workflows.md
%files -n group-review
%files group-review
%license COPYING
%doc group-review/README.md
%{_bindir}/group-review
%files -n hujson
%files utils
%license COPYING
%{_bindir}/hujson
%files -n obs-staging-bot
%files obs-staging-bot
%license COPYING
%doc obs-staging-bot/README.md
%{_bindir}/obs-staging-bot
%{_unitdir}/obs-staging-bot.service
%files -n obs-status-service
%files obs-status-service
%license COPYING
%doc obs-status-service/README.md
%{_bindir}/obs-status-service
%{_unitdir}/obs-status-service.service
%files -n workflow-direct
%files workflow-direct
%license COPYING
%doc workflow-direct/README.md
%{_bindir}/workflow-direct
%files -n workflow-pr
%files workflow-pr
%license COPYING
%doc workflow-pr/README.md
%{_bindir}/workflow-pr

View File

@@ -25,6 +25,7 @@ import (
"io"
"log"
"os"
"slices"
"strings"
"github.com/tailscale/hujson"
@@ -35,6 +36,9 @@ import (
const (
ProjectConfigFile = "workflow.config"
StagingConfigFile = "staging.config"
Permission_ForceMerge = "force-merge"
Permission_Group = "release-engineering"
)
type ConfigFile struct {
@@ -52,12 +56,18 @@ type QAConfig struct {
Origin string
}
type Permissions struct {
Permission string
Members []string
}
type AutogitConfig struct {
Workflows []string // [pr, direct, test]
Organization string
GitProjectName string // Organization/GitProjectName.git is PrjGit
Branch string // branch name of PkgGit that aligns with PrjGit submodules
Reviewers []string // only used by `pr` workflow
GitProjectName string // Organization/GitProjectName.git is PrjGit
Branch string // branch name of PkgGit that aligns with PrjGit submodules
Reviewers []string // only used by `pr` workflow
Permissions []*Permissions // only used by `pr` workflow
ReviewGroups []*ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
@@ -186,6 +196,27 @@ func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *Autogit
return nil
}
func (config *AutogitConfig) HasPermission(user, permission string) bool {
if config == nil {
return false
}
for _, p := range config.Permissions {
if p.Permission == permission {
if slices.Contains(p.Members, user) {
return true
}
for _, m := range p.Members {
if members, err := config.GetReviewGroupMembers(m); err == nil && slices.Contains(members, user) {
return true
}
}
}
}
return false
}
func (config *AutogitConfig) GetReviewGroupMembers(reviewer string) ([]string, error) {
for _, g := range config.ReviewGroups {
if g.Name == reviewer {
@@ -254,6 +285,9 @@ type StagingConfig struct {
func ParseStagingConfig(data []byte) (*StagingConfig, error) {
var staging StagingConfig
if len(data) == 0 {
return nil, errors.New("non-existent config file.")
}
data, err := hujson.Standardize(data)
if err != nil {
return nil, err

View File

@@ -190,3 +190,67 @@ func TestProjectGitParser(t *testing.T) {
})
}
}
func TestConfigPermissions(t *testing.T) {
tests := []struct {
name string
permission string
user string
config *common.AutogitConfig
result bool
}{
{
name: "NoPermissions",
permission: common.Permission_ForceMerge,
},
{
name: "NoPermissions",
permission: common.Permission_Group,
},
{
name: "Regular permission ForcePush",
permission: common.Permission_ForceMerge,
result: true,
user: "user",
config: &common.AutogitConfig{
Permissions: []*common.Permissions{
&common.Permissions{
Permission: common.Permission_ForceMerge,
Members: []string{"user"},
},
},
},
},
{
name: "User is part of a group",
permission: common.Permission_ForceMerge,
result: true,
user: "user",
config: &common.AutogitConfig{
Permissions: []*common.Permissions{
&common.Permissions{
Permission: common.Permission_ForceMerge,
Members: []string{"group"},
},
},
ReviewGroups: []*common.ReviewGroup{
&common.ReviewGroup{
Name: "group",
Reviewers: []string{"some", "members", "including", "user"},
},
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
if r := test.config.HasPermission(test.user, test.permission); r != test.result {
t.Error("Expecting", test.result, "but got opposite")
}
if r := test.config.HasPermission(test.user+test.user, test.permission); r {
t.Error("Expecting false for fake user, but got opposite")
}
})
}
}

View File

@@ -1731,3 +1731,246 @@ const requestedReviewJSON = `{
"commit_id": "",
"review": null
}`
const requestStatusJSON=`{
"commit": {
"id": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"message": "Update nodejs-common.changes\n",
"url": "https://src.opensuse.org/autogits/nodejs-common/commit/e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"author": {
"name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"username": "adamm"
},
"committer": {
"name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"username": "adamm"
},
"verification": null,
"timestamp": "2025-09-16T12:41:02+02:00",
"added": [],
"removed": [],
"modified": [
"nodejs-common.changes"
]
},
"context": "test",
"created_at": "2025-09-16T10:50:32Z",
"description": "",
"id": 21663,
"repository": {
"id": 90520,
"owner": {
"id": 983,
"login": "autogits",
"login_name": "",
"source_id": 0,
"full_name": "",
"email": "",
"avatar_url": "https://src.opensuse.org/avatars/80a61ef3a14c3c22f0b8b1885d1a75d4",
"html_url": "https://src.opensuse.org/autogits",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2024-06-20T09:46:37+02:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 0,
"following_count": 0,
"starred_repos_count": 0,
"username": "autogits"
},
"name": "nodejs-common",
"full_name": "autogits/nodejs-common",
"description": "",
"empty": false,
"private": false,
"fork": true,
"template": false,
"parent": {
"id": 62649,
"owner": {
"id": 64,
"login": "pool",
"login_name": "",
"source_id": 0,
"full_name": "",
"email": "",
"avatar_url": "https://src.opensuse.org/avatars/b10a8c0bede9eb4ea771b04db3149f28",
"html_url": "https://src.opensuse.org/pool",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2023-03-01T14:41:17+01:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 2,
"following_count": 0,
"starred_repos_count": 0,
"username": "pool"
},
"name": "nodejs-common",
"full_name": "pool/nodejs-common",
"description": "",
"empty": false,
"private": false,
"fork": false,
"template": false,
"mirror": false,
"size": 134,
"language": "",
"languages_url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common/languages",
"html_url": "https://src.opensuse.org/pool/nodejs-common",
"url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common",
"link": "",
"ssh_url": "gitea@src.opensuse.org:pool/nodejs-common.git",
"clone_url": "https://src.opensuse.org/pool/nodejs-common.git",
"original_url": "",
"website": "",
"stars_count": 0,
"forks_count": 3,
"watchers_count": 12,
"open_issues_count": 0,
"open_pr_counter": 0,
"release_counter": 0,
"default_branch": "factory",
"archived": false,
"created_at": "2024-06-17T17:08:45+02:00",
"updated_at": "2025-08-21T21:58:31+02:00",
"archived_at": "1970-01-01T01:00:00+01:00",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": true,
"internal_tracker": {
"enable_time_tracker": false,
"allow_only_contributors_to_track_time": true,
"enable_issue_dependencies": true
},
"has_wiki": false,
"has_pull_requests": true,
"has_projects": false,
"projects_mode": "all",
"has_releases": false,
"has_packages": false,
"has_actions": false,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"allow_fast_forward_only_merge": true,
"allow_rebase_update": true,
"allow_manual_merge": true,
"autodetect_manual_merge": true,
"default_delete_branch_after_merge": false,
"default_merge_style": "merge",
"default_allow_maintainer_edit": false,
"avatar_url": "",
"internal": false,
"mirror_interval": "",
"object_format_name": "sha256",
"mirror_updated": "0001-01-01T00:00:00Z",
"topics": [],
"licenses": []
},
"mirror": false,
"size": 143,
"language": "",
"languages_url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common/languages",
"html_url": "https://src.opensuse.org/autogits/nodejs-common",
"url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common",
"link": "",
"ssh_url": "gitea@src.opensuse.org:autogits/nodejs-common.git",
"clone_url": "https://src.opensuse.org/autogits/nodejs-common.git",
"original_url": "",
"website": "",
"stars_count": 0,
"forks_count": 1,
"watchers_count": 4,
"open_issues_count": 0,
"open_pr_counter": 1,
"release_counter": 0,
"default_branch": "factory",
"archived": false,
"created_at": "2024-07-01T13:29:03+02:00",
"updated_at": "2025-09-16T12:41:03+02:00",
"archived_at": "1970-01-01T01:00:00+01:00",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": false,
"has_wiki": false,
"has_pull_requests": true,
"has_projects": false,
"projects_mode": "all",
"has_releases": false,
"has_packages": false,
"has_actions": false,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"allow_fast_forward_only_merge": true,
"allow_rebase_update": true,
"allow_manual_merge": true,
"autodetect_manual_merge": true,
"default_delete_branch_after_merge": false,
"default_merge_style": "merge",
"default_allow_maintainer_edit": false,
"avatar_url": "",
"internal": false,
"mirror_interval": "",
"object_format_name": "sha256",
"mirror_updated": "0001-01-01T00:00:00Z",
"topics": [],
"licenses": [
"MIT"
]
},
"sender": {
"id": 129,
"login": "adamm",
"login_name": "",
"source_id": 0,
"full_name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"avatar_url": "https://src.opensuse.org/avatars/3e8917bfbf04293f7c20c28cacd83dae2ba9b78a6c6a9a1bedf14c683d8a3763",
"html_url": "https://src.opensuse.org/adamm",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2023-07-21T16:43:48+02:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 1,
"following_count": 0,
"starred_repos_count": 0,
"username": "adamm"
},
"sha": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"state": "pending",
"target_url": "https://src.opensuse.org/",
"updated_at": "2025-09-16T10:50:32Z"
}`

View File

@@ -40,6 +40,10 @@ type GitSubmoduleLister interface {
GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool)
}
type GitDirectoryLister interface {
GitDirectoryList(gitPath, commitId string) (dirlist map[string]string, err error)
}
type GitStatusLister interface {
GitStatus(cwd string) ([]GitStatusData, error)
}
@@ -61,6 +65,7 @@ type Git interface {
io.Closer
GitSubmoduleLister
GitDirectoryLister
GitStatusLister
GitExecWithOutputOrPanic(cwd string, params ...string) string
@@ -247,26 +252,26 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
e.GitExecOrPanic(repo, "fetch", "--prune", remoteName, remoteBranch)
}
/*
refsBytes, err := os.ReadFile(path.Join(e.GitPath, repo, ".git/refs/remotes", remoteName, "HEAD"))
if err != nil {
LogError("Cannot read HEAD of remote", remoteName)
return remoteName, fmt.Errorf("Cannot read HEAD of remote %s", remoteName)
}
/*
refsBytes, err := os.ReadFile(path.Join(e.GitPath, repo, ".git/refs/remotes", remoteName, "HEAD"))
if err != nil {
LogError("Cannot read HEAD of remote", remoteName)
return remoteName, fmt.Errorf("Cannot read HEAD of remote %s", remoteName)
}
refs := string(refsBytes)
if refs[0:5] != "ref: " {
LogError("Unexpected format of remote HEAD ref:", refs)
return remoteName, fmt.Errorf("Unexpected format of remote HEAD ref: %s", refs)
}
refs := string(refsBytes)
if refs[0:5] != "ref: " {
LogError("Unexpected format of remote HEAD ref:", refs)
return remoteName, fmt.Errorf("Unexpected format of remote HEAD ref: %s", refs)
}
if len(branch) == 0 || branch == "HEAD" {
remoteRef = strings.TrimSpace(refs[5:])
branch = remoteRef[strings.LastIndex(remoteRef, "/")+1:]
LogDebug("remoteRef", remoteRef)
LogDebug("branch", branch)
}
*/
if len(branch) == 0 || branch == "HEAD" {
remoteRef = strings.TrimSpace(refs[5:])
branch = remoteRef[strings.LastIndex(remoteRef, "/")+1:]
LogDebug("remoteRef", remoteRef)
LogDebug("branch", branch)
}
*/
args := []string{"fetch", "--prune", remoteName, branch}
if strings.TrimSpace(e.GitExecWithOutputOrPanic(repo, "rev-parse", "--is-shallow-repository")) == "true" {
args = slices.Insert(args, 1, "--unshallow")
@@ -778,6 +783,80 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
return
}
// return (filename) -> (hash) map for all submodules
func (e *GitHandlerImpl) GitDirectoryList(gitPath, commitId string) (directoryList map[string]string, err error) {
var done sync.Mutex
directoryList = make(map[string]string)
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
LogDebug("Getting directory for:", commitId)
go func() {
defer done.Unlock()
defer close(data_out.ch)
data_out.Write([]byte(commitId))
data_out.ch <- '\x00'
var c GitCommit
c, err = parseGitCommit(data_in.ch)
if err != nil {
err = fmt.Errorf("Error parsing git commit. Err: %w", err)
return
}
trees := make(map[string]string)
trees[""] = c.Tree
for len(trees) > 0 {
for p, tree := range trees {
delete(trees, p)
data_out.Write([]byte(tree))
data_out.ch <- '\x00'
var tree GitTree
tree, err = parseGitTree(data_in.ch)
if err != nil {
err = fmt.Errorf("Error parsing git tree: %w", err)
return
}
for _, te := range tree.items {
if te.isTree() {
directoryList[p+te.name] = te.hash
}
}
}
}
}()
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
cmd.Env = []string{
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_CONFIG_GLOBAL=/dev/null",
}
cmd.Dir = filepath.Join(e.GitPath, gitPath)
cmd.Stdout = &data_in
cmd.Stdin = &data_out
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
LogError(string(data))
return len(data), nil
})
LogDebug("command run:", cmd.Args)
if e := cmd.Run(); e != nil {
LogError(e)
close(data_in.ch)
close(data_out.ch)
return directoryList, e
}
done.Lock()
return directoryList, err
}
// return (filename) -> (hash) map for all submodules
func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error) {
var done sync.Mutex

View File

@@ -182,7 +182,6 @@ type Gitea interface {
GiteaCommitStatusGetter
GiteaCommitStatusSetter
GiteaSetRepoOptions
GiteaTimelineFetcher
GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error)
GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error)
@@ -310,6 +309,9 @@ func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRe
return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", org, repo, err)
}
if len(req.Payload) == 0 {
break
}
prs = slices.Concat(prs, req.Payload)
if len(req.Payload) < int(limit) {
break
@@ -332,11 +334,11 @@ func (gitea *GiteaTransport) GetCommitStatus(org, repo, hash string) ([]*models.
if err != nil {
return res, err
}
res = append(res, r.Payload...)
if len(r.Payload) < int(limit) {
if len(r.Payload) == 0 {
break
}
res = append(res, r.Payload...)
page++
}
return res, nil
@@ -397,10 +399,10 @@ func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum in
return nil, err
}
allReviews = slices.Concat(allReviews, reviews.Payload)
if len(reviews.Payload) < int(limit) {
if len(reviews.Payload) == 0 {
break
}
allReviews = slices.Concat(allReviews, reviews.Payload)
page++
}
@@ -490,6 +492,9 @@ func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]
return nil, err
}
if len(list.Payload) == 0 {
break
}
ret = slices.Concat(ret, list.Payload)
if len(list.Payload) < int(bigLimit) {
break
@@ -653,7 +658,7 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
WithBase(targetId).
WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil {
); err == nil && pr.Payload.State == "open" {
return pr.Payload, nil
}
@@ -780,6 +785,9 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
if resCount == 0 {
break
}
page++
for _, d := range res.Payload {

View File

@@ -13,10 +13,10 @@ import (
//go:generate mockgen -source=maintainership.go -destination=mock/maintainership.go -typed
type MaintainershipData interface {
ListProjectMaintainers() []string
ListPackageMaintainers(pkg string) []string
ListProjectMaintainers(OptionalGroupExpansion []*ReviewGroup) []string
ListPackageMaintainers(Pkg string, OptionalGroupExpasion []*ReviewGroup) []string
IsApproved(pkg string, reviews []*models.PullReview, submitter string) bool
IsApproved(Pkg string, Reviews []*models.PullReview, Submitter string, ReviewGroups []*ReviewGroup) bool
}
const ProjectKey = ""
@@ -70,7 +70,7 @@ func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit
return m, err
}
func (data *MaintainershipMap) ListProjectMaintainers() []string {
func (data *MaintainershipMap) ListProjectMaintainers(groups []*ReviewGroup) []string {
if data == nil {
return nil
}
@@ -80,6 +80,11 @@ func (data *MaintainershipMap) ListProjectMaintainers() []string {
return nil
}
// expands groups
for _, g := range groups {
m = g.ExpandMaintainers(m)
}
return m
}
@@ -96,7 +101,7 @@ func parsePkgDirData(pkg string, data []byte) []string {
return pkgMaintainers
}
func (data *MaintainershipMap) ListPackageMaintainers(pkg string) []string {
func (data *MaintainershipMap) ListPackageMaintainers(pkg string, groups []*ReviewGroup) []string {
if data == nil {
return nil
}
@@ -111,7 +116,7 @@ func (data *MaintainershipMap) ListPackageMaintainers(pkg string) []string {
}
}
}
prjMaintainers := data.ListProjectMaintainers()
prjMaintainers := data.ListProjectMaintainers(nil)
prjMaintainer:
for _, prjm := range prjMaintainers {
@@ -123,15 +128,20 @@ prjMaintainer:
pkgMaintainers = append(pkgMaintainers, prjm)
}
// expands groups
for _, g := range groups {
pkgMaintainers = g.ExpandMaintainers(pkgMaintainers)
}
return pkgMaintainers
}
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview, submitter string) bool {
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview, submitter string, groups []*ReviewGroup) bool {
var reviewers []string
if pkg != ProjectKey {
reviewers = data.ListPackageMaintainers(pkg)
reviewers = data.ListPackageMaintainers(pkg, groups)
} else {
reviewers = data.ListProjectMaintainers()
reviewers = data.ListProjectMaintainers(groups)
}
if len(reviewers) == 0 {

View File

@@ -28,6 +28,8 @@ func TestMaintainership(t *testing.T) {
maintainersFile []byte
maintainersFileErr error
groups []*common.ReviewGroup
maintainersDir map[string][]byte
}{
/* PACKAGE MAINTAINERS */
@@ -51,6 +53,22 @@ func TestMaintainership(t *testing.T) {
maintainers: []string{"user1", "user2", "user3"},
packageName: "pkg",
},
{
name: "Multiple package maintainers and groups",
maintainersFile: []byte(`{"pkg": ["user1", "user2", "g2"], "": ["g2", "user1", "user3"]}`),
maintainersDir: map[string][]byte{
"_project": []byte(`{"": ["user1", "user3", "g2"]}`),
"pkg": []byte(`{"pkg": ["user1", "g2", "user2"]}`),
},
maintainers: []string{"user1", "user2", "user3", "user5"},
packageName: "pkg",
groups: []*common.ReviewGroup{
{
Name: "g2",
Reviewers: []string{"user1", "user5"},
},
},
},
{
name: "No package maintainers and only project maintainer",
maintainersFile: []byte(`{"pkg2": ["user1", "user2"], "": ["user1", "user3"]}`),
@@ -138,9 +156,9 @@ func TestMaintainership(t *testing.T) {
var m []string
if len(test.packageName) > 0 {
m = maintainers.ListPackageMaintainers(test.packageName)
m = maintainers.ListPackageMaintainers(test.packageName, test.groups)
} else {
m = maintainers.ListProjectMaintainers()
m = maintainers.ListProjectMaintainers(test.groups)
}
if len(m) != len(test.maintainers) {
@@ -207,7 +225,7 @@ func TestMaintainershipFileWrite(t *testing.T) {
{
name: "2 project maintainers and 2 single package maintainers",
maintainers: map[string][]string{
"": {"two", "one"},
"": {"two", "one"},
"pkg1": {},
"foo": {"four", "byte"},
},

View File

@@ -127,10 +127,12 @@ type ProjectMeta struct {
Groups []GroupRepoMeta `xml:"group"`
Repositories []RepositoryMeta `xml:"repository"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
Access Flags `xml:"access"`
SourceAccess Flags `xml:"sourceaccess"`
}
type PackageMeta struct {
@@ -140,6 +142,12 @@ type PackageMeta struct {
ScmSync string `xml:"scmsync"`
Persons []PersonRepoMeta `xml:"person"`
Groups []GroupRepoMeta `xml:"group"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
SourceAccess Flags `xml:"sourceaccess"`
}
type UserMeta struct {

View File

@@ -9,6 +9,7 @@ import (
"slices"
"strings"
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
"src.opensuse.org/autogits/common/gitea-generated/models"
)
@@ -62,13 +63,15 @@ func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRIn
var Timeline_RefIssueNotFound error = errors.New("RefIssue not found on the timeline")
func LastPrjGitRefOnTimeline(gitea GiteaPRTimelineFetcher, org, repo string, num int64, prjGitOrg, prjGitRepo string) (*models.PullRequest, error) {
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
timeline, err := gitea.GetTimeline(org, repo, num)
if err != nil {
LogError("Failed to fetch timeline for", org, repo, "#", num, err)
return nil, err
}
prjGitOrg, prjGitRepo, prjGitBranch := config.GetPrjGit()
for idx := len(timeline) - 1; idx >= 0; idx-- {
item := timeline[idx]
issue := item.RefIssue
@@ -78,6 +81,24 @@ func LastPrjGitRefOnTimeline(gitea GiteaPRTimelineFetcher, org, repo string, num
issue.Repository.Owner == prjGitOrg &&
issue.Repository.Name == prjGitRepo {
if !config.NoProjectGitPR {
if issue.User.UserName != botUser {
continue
}
}
pr, err := gitea.GetPullRequest(prjGitOrg, prjGitRepo, issue.Index)
switch err.(type) {
case *repository.RepoGetPullRequestNotFound: // deleted?
continue
default:
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
}
if pr.Base.Ref != prjGitBranch {
continue
}
_, prs := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(item.RefIssue.Body)))
for _, pr := range prs {
if pr.Org == org && pr.Repo == repo && pr.Num == num {
@@ -104,7 +125,7 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
return nil, err
}
} else {
if pr, err = LastPrjGitRefOnTimeline(gitea, org, repo, num, prjGitOrg, prjGitRepo); err != nil && err != Timeline_RefIssueNotFound {
if pr, err = LastPrjGitRefOnTimeline(user, gitea, org, repo, num, config); err != nil && err != Timeline_RefIssueNotFound {
return nil, err
}
@@ -231,11 +252,11 @@ func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintaine
reviewers = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
if len(rs.PRs) == 1 {
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers())
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers(nil))
}
} else {
pkg := pr.PR.Base.Repo.Name
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(), maintainers.ListPackageMaintainers(pkg), configReviewers.PkgOptional)
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(nil), maintainers.ListPackageMaintainers(pkg, nil), configReviewers.PkgOptional)
}
slices.Sort(reviewers)
@@ -289,13 +310,14 @@ func (rs *PRSet) RemoveClosedPRs() {
func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData) bool {
configReviewers := ParseReviewers(rs.Config.Reviewers)
is_manually_reviewed_ok := false
if need_manual_review := rs.Config.ManualMergeOnly || rs.Config.ManualMergeProject; need_manual_review {
// Groups are expanded here because any group member can issue "merge ok" to the BotUser
groups := rs.Config.ReviewGroups
prjgit, err := rs.GetPrjGitPR()
if err == nil && prjgit != nil {
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers())
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers(groups))
LogDebug("Fetching reviews for", prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
r, err := FetchGiteaReviews(gitea, reviewers, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
if err != nil {
@@ -315,7 +337,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pkg := pr.PR.Base.Repo.Name
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg))
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg, groups))
LogDebug("Fetching reviews for", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
if err != nil {
@@ -356,18 +378,22 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
if err != nil {
LogError("Cannot fetch gita reaviews for PR:", err)
LogError("Cannot fetch gitea reaviews for PR:", err)
return false
}
is_manually_reviewed_ok = r.IsApproved()
LogDebug(pr.PR.Base.Repo.Name, is_manually_reviewed_ok)
LogDebug("PR to", pr.PR.Base.Repo.Name, "reviewed?", is_manually_reviewed_ok)
if !is_manually_reviewed_ok {
if GetLoggingLevel() > LogLevelInfo {
LogDebug("missing reviewers:", r.MissingReviews())
}
return false
}
if need_maintainer_review := !rs.IsPrjGitPR(pr.PR) || pr.PR.User.UserName != rs.BotUser; need_maintainer_review {
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName); !is_manually_reviewed_ok {
// Do not expand groups here, as the group-review-bot will ACK if group has reviewed.
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
LogDebug(" not approved?", pkg)
return false
}
@@ -415,7 +441,6 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
// we can only resolve conflicts with .gitmodules
for _, s := range status {
if s.Status == GitStatus_Unmerged {
panic("Can't handle conflicts yet")
if s.Path != ".gitmodules" {
return err
}
@@ -424,10 +449,10 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
if err != nil {
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
}
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
/*s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
}*/
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
@@ -437,10 +462,11 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
/*
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
}*/
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
@@ -450,11 +476,11 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
// merge from subs3 (target), subs1 (orig), subs2 (2-nd base that is missing from target base)
// merge from subs3 (target), subs2 (2-nd base that is missing from target base)
// this will update submodules
mergedSubs := slices.Concat(subs1, subs2, subs3)
mergedSubs := slices.Concat(subs2, subs3)
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs2), len(subs3))+5)
nextSub:
for subName := range submodules {

View File

@@ -608,9 +608,9 @@ func TestPR(t *testing.T) {
*/
maintainers := mock_common.NewMockMaintainershipData(ctl)
maintainers.EXPECT().ListPackageMaintainers(gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().ListProjectMaintainers().Return([]string{}).AnyTimes()
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any(), gomock.Any()).Return(true).AnyTimes()
maintainers.EXPECT().ListPackageMaintainers(gomock.Any(), gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(true).AnyTimes()
if isApproved := res.IsApproved(review_mock, maintainers); isApproved != test.reviewed {
t.Error("expected reviewed to be NOT", isApproved)
@@ -817,8 +817,8 @@ func TestPRAssignReviewers(t *testing.T) {
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
review_mock.EXPECT().GetTimeline("org", "repo", int64(42)).Return(test.prjTimeline, nil)
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo").Return([]string{"pkgmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo", gomock.Any()).Return([]string{"pkgmaintainer"}).AnyTimes()
prs, _ := common.FetchPRSet("test", pr_mock, "other", "pkgrepo", int64(1), &test.config)
if len(prs.PRs) != 2 {
@@ -880,7 +880,7 @@ func TestPRAssignReviewers(t *testing.T) {
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
review_mock.EXPECT().GetTimeline("org", "repo", int64(1)).Return(nil, nil)
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
prs, _ := common.FetchPRSet("test", pr_mock, "org", "repo", int64(1), &test.config)
if len(prs.PRs) != 1 {

View File

@@ -46,6 +46,7 @@ const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Status = "status"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {

62
common/request_status.go Normal file
View File

@@ -0,0 +1,62 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"encoding/json"
"fmt"
"io"
)
// Status is currently an empty placeholder for Gitea's commit-status object;
// the webhook fields this bot consumes live directly on StatusWebhookEvent.
type Status struct {
}

// StatusWebhookEvent is the decoded payload of a Gitea "status" webhook,
// sent when a commit status (e.g. a CI result) is created or updated.
type StatusWebhookEvent struct {
	Id          uint64
	Context     string // status context label identifying the reporting system
	Description string
	Sha         string // commit hash the status applies to
	State       string // one of: "pending", "success", "error", "failure"
	TargetUrl   string
	Commit      Commit
	Repository  Repository
	Sender      *User
}
// GetAction returns the status' State as the event's action string
// (e.g. "pending"), mirroring the accessor other webhook events provide.
func (s *StatusWebhookEvent) GetAction() string {
	return s.State
}
// ParseStatusRequest decodes a Gitea commit-status webhook payload from data,
// records it as the handler's current Request (type RequestType_Status) and
// returns the parsed event. Returns a wrapped error if the JSON is invalid.
func (h *RequestHandler) ParseStatusRequest(data io.Reader) (*StatusWebhookEvent, error) {
	action := new(StatusWebhookEvent)
	// action is already a pointer; decode into it directly instead of
	// passing &action (a needless **StatusWebhookEvent indirection).
	if err := json.NewDecoder(data).Decode(action); err != nil {
		return nil, fmt.Errorf("Got error while parsing: %w", err)
	}

	h.StdLogger.Printf("Request status for repo: %s#%s\n", action.Repository.Full_Name, action.Sha)
	h.Request = &Request{
		Type: RequestType_Status,
		Data: action,
	}
	return action, nil
}

View File

@@ -0,0 +1,40 @@
package common_test
import (
"os"
"strings"
"testing"
"src.opensuse.org/autogits/common"
)
// TestStatusRequestParsing verifies that a raw commit-status webhook JSON
// payload is decoded into a StatusWebhookEvent with state, repository
// metadata and commit SHA intact.
func TestStatusRequestParsing(t *testing.T) {
	t.Run("parsing repo creation message", func(t *testing.T) {
		var handler common.RequestHandler
		handler.StdLogger, handler.ErrLogger = common.CreateStdoutLogger(os.Stdout, os.Stdout)

		ev, err := handler.ParseStatusRequest(strings.NewReader(requestStatusJSON))
		if err != nil {
			t.Fatalf("Can't parse struct: %s", err)
		}

		if ev.GetAction() != "pending" {
			t.Fatalf("json.action is '%#v'", ev)
		}

		if ev.Repository.Full_Name != "autogits/nodejs-common" ||
			ev.Repository.Parent == nil ||
			ev.Repository.Parent.Parent != nil ||
			len(ev.Repository.Ssh_Url) < 10 ||
			ev.Repository.Default_Branch != "factory" ||
			ev.Repository.Object_Format_Name != "sha256" {

			t.Fatalf("invalid repository parse: %#v", ev.Repository)
		}

		if ev.Sha != "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e" {
			t.Fatal("Invalid SHA:", ev.Sha)
		}
	})
}

17
common/review_group.go Normal file
View File

@@ -0,0 +1,17 @@
package common
import (
"slices"
)
// ExpandMaintainers replaces an occurrence of the group's name in the given
// maintainer list with the group's individual Reviewers, then returns the
// list sorted and de-duplicated. If the group name is absent, the input is
// returned unchanged. The input slice is never modified: the original code
// called slices.Replace and slices.Sort on the caller's slice, which mutates
// shared maintainer lists in place whenever the replacement fits capacity.
func (group *ReviewGroup) ExpandMaintainers(maintainers []string) []string {
	idx := slices.Index(maintainers, group.Name)
	if idx == -1 {
		return maintainers
	}

	// Work on a copy so the caller's (possibly shared) list is untouched.
	expanded := slices.Replace(slices.Clone(maintainers), idx, idx+1, group.Reviewers...)
	slices.Sort(expanded)
	return slices.Compact(expanded)
}

View File

@@ -0,0 +1,62 @@
package common_test
import (
"slices"
"testing"
"src.opensuse.org/autogits/common"
)
// TestMaintainerGroupReplacer exercises ReviewGroup.ExpandMaintainers: a
// reviewer entry matching the group name is replaced by the group members,
// and the resulting list is sorted and de-duplicated.
func TestMaintainerGroupReplacer(t *testing.T) {
	groupName := "my_group"

	tests := []struct {
		name      string
		reviewers []string
		members   []string
		want      []string
	}{
		{
			name: "empty",
		},
		{
			name:      "group not maintainer",
			reviewers: []string{"a", "b"},
			members:   []string{"g1", "g2"},
			want:      []string{"a", "b"},
		},
		{
			name:      "group maintainer",
			reviewers: []string{"b", "my_group"},
			members:   []string{"g1", "g2"},
			want:      []string{"b", "g1", "g2"},
		},
		{
			name:      "sorted group maintainer",
			reviewers: []string{"my_group", "b"},
			members:   []string{"g1", "g2"},
			want:      []string{"b", "g1", "g2"},
		},
		{
			name:      "group maintainer dedup",
			reviewers: []string{"my_group", "g2", "b"},
			members:   []string{"g1", "g2"},
			want:      []string{"b", "g1", "g2"},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			group := &common.ReviewGroup{
				Name:      groupName,
				Reviewers: tc.members,
			}
			got := group.ExpandMaintainers(tc.reviewers)
			if !slices.Equal(got, tc.want) {
				t.Error("Expected:", tc.want, "but have", got)
			}
		})
	}
}

View File

@@ -128,6 +128,16 @@ func (r *PRReviews) IsApproved() bool {
return goodReview
}
// MissingReviews returns the requested reviewers that have not yet reviewed
// this pull request. The result is empty (never nil) when everyone reviewed.
func (r *PRReviews) MissingReviews() []string {
	pending := make([]string, 0, len(r.reviewers))
	for _, name := range r.reviewers {
		if r.IsReviewedBy(name) {
			continue
		}
		pending = append(pending, name)
	}
	return pending
}
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
if !slices.Contains(r.reviewers, reviewer) {
return false

View File

@@ -54,6 +54,10 @@ func TranslateHttpsToSshUrl(url string) (string, error) {
url2_len = len(url2)
)
if len(url) > 10 && (url[0:10] == "gitea@src." || url[0:10] == "ssh://gite") {
return url, nil
}
if len(url) > url1_len && url[0:url1_len] == url1 {
return "ssh://gitea@src.opensuse.org/" + url[url1_len:], nil
}

View File

@@ -0,0 +1,171 @@
#!/usr/bin/perl
use strict;
use warnings;

use Digest::SHA qw(sha256_hex);
use IPC::Open2;
use JSON;
# FindFactoryCommit($package) -- find the commit(s) in the package's local
# clone whose tree contains a blob matching the current openSUSE:Factory
# $package.changes file. Returns the list of matching commit hashes
# (possibly empty). Dies if osc or git cannot be executed.
sub FindFactoryCommit {
    my ($package) = @_;

    # Fetch the current .changes content from openSUSE:Factory.
    my $osc_cmd = "osc cat openSUSE:Factory $package $package.changes";
    open( my $osc_fh, "$osc_cmd |" ) or die "Failed to run osc: $!";
    my $data = do { local $/; <$osc_fh> };
    close($osc_fh);

    # Hash the content the way git hashes a blob: sha256("blob <size>\0<data>").
    # Digest::SHA (Perl core) computes this in-process, replacing the previous
    # pipe through an external `openssl sha256` and the output scraping.
    my $size = length($data);
    my $blob = "blob $size\0$data";
    my $hash = sha256_hex($blob);

    print("looking for hash: $hash\n");

    # Walk every commit reachable from pool/HEAD and keep those whose tree
    # references a regular-file blob with the computed hash.
    my @hashes;
    my $git_cmd =
      "git -C $package rev-list --all pool/HEAD | while read commit; do git -C $package ls-tree \"\$commit\" | grep -q '^100644 blob $hash' && echo \"\$commit\"; done";
    open( my $git_fh, "$git_cmd |" ) or die "Failed to run git search: $!";
    while ( my $commit = <$git_fh> ) {
        chomp $commit;
        print "Found commit $commit\n";
        push( @hashes, $commit );
    }
    close($git_fh);

    return @hashes;
}
# ListPackages($project) -- return the package names whose devel project is
# $project, according to Factory's pkgs/_meta/devel_packages mapping.
sub ListPackages {
    my ($project) = @_;

    # The mapping file has "<package> <devel-project>" per line; awk keeps
    # column 1 of the lines whose column 2 matches the requested project.
    open( my $osc_fh,
        "curl -s https://src.opensuse.org/openSUSE/Factory/raw/branch/main/pkgs/_meta/devel_packages | awk '{ if ( \$2 == \"$project\" ) print \$1 }' |" )
      or die "Failed to run curl: $!";
    my @packages = <$osc_fh>;
    chomp @packages;
    close($osc_fh);

    return @packages;
}
# Read project from first argument

# Usage() -- print invocation help and abort.
sub Usage {
    die "Usage: $0 <OBS Project> <org>";
}

my $project = shift or Usage();
my $org     = shift or Usage();

my @packages = ListPackages($project);

# An optional third argument restricts the run to a single package.
my $pkg = shift;
@packages = ($pkg) if defined $pkg;

my @tomove;
my @toremove;

print "Verify packages in /pool ...\n";

# Verify that every package repo in the devel org is a fork from pool.
for my $pkg ( sort(@packages) ) {
    my $data = `git obs api /repos/$org/$pkg 2> /dev/null`;
    if ( length($data) == 0 ) {
        die "Repo missing in $org: $pkg";
    }
    else {
        my $repo = decode_json($data);
        if ( !$repo->{parent}
            || $repo->{parent}->{owner}->{username} ne "pool" )
        {
            if ( system("git obs api /repos/pool/$pkg > /dev/null 2> /dev/null") == 0 ) {
                # A pool repo already exists, but the org repo is not its fork.
                print "=== $pkg NOT A FORK of existing package\n";
                push( @toremove, $pkg );
            }
            else {
                # No pool repo yet: the org repo needs to be transferred there.
                print "$pkg NEEDS transfer\n";
                push( @tomove, $pkg );
            }
        }
    }
}

if ( scalar @toremove > 0 ) {
    print "ABORTING. Need repos removed.\n";
    print "@toremove\n";
    exit(1);
}

if ( scalar @tomove > 0 ) {

    #    for my $pkg (@tomove) {
    #        system("git obs api -X POST --data '{\"new_owner\": \"pool\"}' /repos/$org/$pkg/transfer 2> /dev/null > /dev/null");
    #    }
    print "ABORTING. Need to move repos.\n";
    print "Initiated transfer for @tomove\n";
    exit(1);
}

print "Verify complete.\n";

# For each package: clone (or re-point) the pool remote, locate the commit
# matching current Factory content and verify its checkout against the
# md5sums reported by OBS.
for my $package ( sort(@packages) ) {
    print " ----- PROCESSING $package\n";

    my $url      = "https://src.opensuse.org/$org/$package.git";
    my $push_url = "gitea\@src.opensuse.org:pool/$package.git";

    if ( not -e $package ) {
        print("cloning...\n");
        system("git clone --origin pool $url") == 0
          or die "Can't clone $org/$package";
    }
    else {
        print("adding remote...\n");
        system("git -C $package remote rm pool > /dev/null");
        system("git -C $package remote add pool $url") == 0
          or die "Can't add pool for $package";
    }
    system("git -C $package remote set-url pool --push $push_url") == 0
      or die "Can't add push remote for $package";

    print("fetching remote...\n");
    system("git -C $package fetch pool") == 0
      or ( push( @tomove, $package ) and die "Can't fetch pool for $package" );

    my @commits = FindFactoryCommit($package);
    my $c;
    my $match = 0;
    for my $commit (@commits) {
        if ( length($commit) != 64 ) {
            print("Failed to find factory commit. Aborting.\n");
            exit(1);
        }

        # Check out the candidate commit (including LFS content) and compare
        # the working tree against the file list + md5sums that OBS reports
        # for openSUSE:Factory, ignoring scmsync metadata files.
        if (
            system("git -C $package lfs fetch pool $commit") == 0
            and system("git -C $package checkout -B factory $commit") == 0
            and system("git -C $package lfs checkout") == 0
            and system(
                "cd $package; osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | md5sum -c --quiet"
            ) == 0
          )
        {
            $c     = $commit;
            $match = 1;
            last;
        }
    }

    if ( !$match ) {
        die "Match not found. Aborting.";
    }

    #system ("git -C $package push -f pool factory");
    print "$package: $c\n";
}

View File

@@ -274,6 +274,13 @@ func findMissingDevelBranch(git common.Git, pkg, project string) {
}
func importFactoryRepoAndCheckHistory(pkg string, meta *common.PackageMeta) (factoryRepo *models.Repository, retErr error) {
devel_project, err := devel_projects.GetDevelProject(pkg)
if err != nil {
return nil, fmt.Errorf("Error finding devel project for '%s'. Assuming independent: %w", pkg, err)
} else if devel_project != prj {
return nil, fmt.Errorf("Not factory devel project -- importing package '%s' as independent: %w", pkg, err)
}
if repo, err := client.Repository.RepoGet(repository.NewRepoGetParams().WithDefaults().WithOwner("pool").WithRepo(giteaPackage(pkg)), r.DefaultAuthentication); err != nil || repo.Payload.ObjectFormatName != "sha256" {
if err != nil && !errors.Is(err, &repository.RepoGetNotFound{}) {
log.Panicln(err)
@@ -323,13 +330,9 @@ func importFactoryRepoAndCheckHistory(pkg string, meta *common.PackageMeta) (fac
return
}
devel_project, err := devel_projects.GetDevelProject(pkg)
common.LogDebug("Devel project:", devel_project, err)
if err == common.DevelProjectNotFound {
// assume it's this project, maybe removed from factory
devel_project = prj
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
common.PanicOnError(gitImporter(prj, pkg))
}
common.LogDebug("finding missing branches in", pkg, devel_project)
findMissingDevelBranch(git, pkg, devel_project)
return
}
@@ -502,9 +505,15 @@ func importDevelRepoAndCheckHistory(pkg string, meta *common.PackageMeta) *model
common.PanicOnError(os.RemoveAll(path.Join(git.GetPath(), pkg)))
}
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
devel_project, _ := devel_projects.GetDevelProject(pkg)
if devel_project == prj {
if err := gitImporter("openSUSE:Factory", pkg); err != nil {
common.PanicOnError(gitImporter(prj, pkg))
}
} else {
common.PanicOnError(gitImporter(prj, pkg))
}
if p := strings.TrimSpace(git.GitExecWithOutputOrPanic(pkg, "rev-list", "--max-parents=0", "--count", "factory")); p != "1" {
common.LogError("Failed to import package:", pkg)
common.PanicOnError(fmt.Errorf("Expecting 1 root in after devel import, but have %s", p))

View File

@@ -6,10 +6,12 @@ devel:languages:erlang:Factory
devel:languages:hare
devel:languages:javascript
devel:languages:lua
devel:languages:perl
devel:openSUSE:Factory
network:dhcp
network:im:whatsapp
network:messaging:xmpp
server:dns
systemsmanagement:cockpit
systemsmanagement:wbem
X11:lxde

View File

@@ -298,6 +298,22 @@ func parseRequestJSONOrg(reqType string, data []byte) (org *common.Organization,
org = pr.Repository.Owner
extraAction = ""
case common.RequestType_Status:
status := common.StatusWebhookEvent{}
if err = json.Unmarshal(data, &status); err != nil {
return
}
switch status.State {
case "pending", "success", "error", "failure":
break
default:
err = fmt.Errorf("Unknown Status' state: %s", status.State)
return
}
org = status.Repository.Owner
extraAction = status.State
case common.RequestType_Wiki:
wiki := common.WikiWebhookEvent{}
if err = json.Unmarshal(data, &wiki); err != nil {

View File

@@ -174,6 +174,22 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
return
}
if err := ProcessPR(pr); err == nil && !common.IsDryRun {
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
} else if err != nil && err != ReviewNotFinished {
common.LogError(err)
}
}
var ReviewNotFinished = fmt.Errorf("Review is not finished")
func ProcessPR(pr *models.PullRequest) error {
org := pr.Base.Repo.Owner.UserName
repo := pr.Base.Repo.Name
id := pr.Index
found := false
for _, reviewer := range pr.RequestedReviewers {
if reviewer != nil && reviewer.UserName == groupName {
@@ -183,46 +199,36 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
}
if !found {
common.LogInfo(" review is not requested for", groupName)
if !common.IsDryRun {
gitea.SetNotificationRead(notification.ID)
}
return
return nil
}
config := configs.GetPrjGitConfig(org, repo, pr.Base.Name)
if config == nil {
common.LogError("Cannot find config for:", fmt.Sprintf("%s/%s!%s", org, repo, pr.Base.Name))
return
return fmt.Errorf("Cannot find config for: %s", pr.URL)
}
if pr.State == "closed" {
// dismiss the review
common.LogInfo(" -- closed request, so nothing to review")
if !common.IsDryRun {
gitea.SetNotificationRead(notification.ID)
}
return
return nil
}
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
if err != nil {
common.LogInfo(" ** No reviews associated with request:", subject.URL, "Error:", err)
return
return fmt.Errorf("Failed to fetch reviews for: %v: %w", pr.URL, err)
}
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(gitea, groupName, pr.Head.Sha, org, repo, id)
if err != nil {
common.LogError(err)
return
return fmt.Errorf("Failed to fetch timeline to review. %w", err)
}
groupConfig, err := config.GetReviewGroup(groupName)
if err != nil {
common.LogError(err)
return
return fmt.Errorf("Failed to fetch review group. %w", err)
}
// submitter cannot be reviewer
requestReviewers := groupConfig.Reviewers
requestReviewers := slices.Clone(groupConfig.Reviewers)
requestReviewers = slices.DeleteFunc(requestReviewers, func(u string) bool { return u == pr.User.UserName })
// pr.Head.Sha
@@ -238,13 +244,10 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
}
UnrequestReviews(gitea, org, repo, id, requestReviewers)
}
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
}
common.LogInfo(" -> approved by", reviewer)
common.LogInfo(" review at", review.Created)
return
return nil
} else if ReviewRejected(review.Body) {
if !common.IsDryRun {
text := reviewer + " requested changes on behalf of " + groupName + ". See " + review.HTMLURL
@@ -255,12 +258,9 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
}
UnrequestReviews(gitea, org, repo, id, requestReviewers)
}
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
}
common.LogInfo(" -> declined by", reviewer)
return
return nil
}
}
}
@@ -290,15 +290,22 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
}
if !found_help_comment && !common.IsDryRun {
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\nDo **not** use standard review interface to review on behalf of the group.\nTo accept the review on behalf of the group, create the following comment: `@<bot>: approve`.\nTo request changes on behalf of the group, create the following comment: `@<bot>: decline` followed with lines justifying the decision.\nFuture edits of the comments are ignored, a new comment is required to change the review state.")
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
"Do **not** use standard review interface to review on behalf of the group.\n"+
"To accept the review on behalf of the group, create the following comment: `@"+groupName+": approve`.\n"+
"To request changes on behalf of the group, create the following comment: `@"+groupName+": decline` followed with lines justifying the decision.\n"+
"Future edits of the comments are ignored, a new comment is required to change the review state.")
if slices.Contains(groupConfig.Reviewers, pr.User.UserName) {
helpComment = helpComment + "\n\n" + fmt.Sprintln("Submitter is member of this review group, hence they are excluded from being one of the reviewers here")
helpComment = helpComment + "\n\n" +
"Submitter is member of this review group, hence they are excluded from being one of the reviewers here"
}
gitea.AddComment(pr, helpComment)
}
return ReviewNotFinished
}
func PeriodReviewCheck(gitea common.Gitea) {
func PeriodReviewCheck() {
notifications, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
if err != nil {
common.LogError(" Error fetching unread notifications: %w", err)
@@ -307,14 +314,15 @@ func PeriodReviewCheck(gitea common.Gitea) {
for _, notification := range notifications {
ProcessNotifications(notification, gitea)
}
}
var gitea common.Gitea
func main() {
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance used for reviews")
rabbitMqHost := flag.String("rabbit-url", "amqps://rabbit.opensuse.org", "RabbitMQ instance where Gitea webhook notifications are sent")
interval := flag.Int64("interval", 5, "Notification polling interval in minutes (min 1 min)")
interval := flag.Int64("interval", 10, "Notification polling interval in minutes (min 1 min)")
configFile := flag.String("config", "", "PrjGit listing config file")
logging := flag.String("logging", "info", "Logging level: [none, error, info, debug]")
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry run, no effect. For debugging")
@@ -351,7 +359,7 @@ func main() {
return
}
gitea := common.AllocateGiteaTransport(*giteaUrl)
gitea = common.AllocateGiteaTransport(*giteaUrl)
configs, err = common.ResolveWorkflowConfigs(gitea, configData)
if err != nil {
common.LogError("Cannot parse workflow configs:", err)
@@ -395,10 +403,13 @@ func main() {
config_modified: make(chan *common.AutogitConfig),
}
process_issue_pr := IssueCommentProcessor{}
configUpdates := &common.RabbitMQGiteaEventsProcessor{
Orgs: []string{},
Handlers: map[string]common.RequestProcessor{
common.RequestType_Push: &config_update,
common.RequestType_Push: &config_update,
common.RequestType_IssueComment: &process_issue_pr,
},
}
configUpdates.Connection().RabbitURL = u
@@ -435,7 +446,7 @@ func main() {
}
}
PeriodReviewCheck(gitea)
PeriodReviewCheck()
time.Sleep(time.Duration(*interval * int64(time.Minute)))
}
}

View File

@@ -7,6 +7,25 @@ import (
"src.opensuse.org/autogits/common"
)
// IssueCommentProcessor handles Gitea issue-comment webhook events
// (comments on PRs) by re-running the review processing for the PR.
type IssueCommentProcessor struct{}
// ProcessFunc reacts to an issue-comment webhook event: it resolves the
// pull request the comment belongs to and re-runs PR processing on it.
// Events of any other type are rejected with an error.
func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
	if req.Type != common.RequestType_IssueComment {
		return fmt.Errorf("Unhandled, ignored request type: %s", req.Type)
	}

	event := req.Data.(*common.IssueCommentWebhookEvent)
	var (
		org   = event.Repository.Owner.Username
		repo  = event.Repository.Name
		index = int64(event.Issue.Number)
	)

	// fetch the PR referenced by the comment; the comment itself is
	// examined later by ProcessPR
	pr, err := gitea.GetPullRequest(org, repo, index)
	if err != nil {
		return fmt.Errorf("Failed to fetch PullRequest from event: %s/%s!%d Error: %w", org, repo, index, err)
	}
	return ProcessPR(pr)
}
// ConfigUpdatePush is a push-event processor that publishes re-parsed
// workflow configs on config_modified; presumably consumed by the main
// polling loop to pick up configuration changes -- verify against main().
type ConfigUpdatePush struct {
	config_modified chan *common.AutogitConfig
}

View File

@@ -0,0 +1,7 @@
Purpose
-------
Forwards PR as an OBS submit request when review requested.
Accepts a request when OBS request is accepted.
Rejects a request when OBS request is denied.

View File

@@ -16,3 +16,27 @@ Target Usage
Any project (devel, etc) that accepts PR and wants build results
Configuration File
------------------
Bot reads `staging.config` from the project git or the PR to the project git.
It's a JSON file with following syntax
```
{
"ObsProject": "home:foo:project",
"StagingProject": "home:foo:project:staging",
"QA": [
{
"Name": "ProjectBuild",
"Origin": "home:foo:product:images"
}
]
}
```
* ObsProject: (**required**) OBS project where the base sources are built. Builds in this project are used as the reference to compare against builds made from the PR sources
* StagingProject: project used as the template for creating the staging project. If omitted, the staging project's repositories are derived from ObsProject instead. The staging project is created under the template project, or in the bot's home project when no template is specified.
* QA: set of projects to build on top of the binaries built in staging.

View File

@@ -47,15 +47,18 @@ const (
Username = "autogits_obs_staging_bot"
)
var GiteaToken string
var runId uint
func FetchPrGit(git common.Git, pr *models.PullRequest) error {
// clone PR head and base and return path
cloneURL := pr.Head.Repo.CloneURL
if GiteaUseSshClone {
cloneURL = pr.Head.Repo.SSHURL
}
// clone PR head via base (target) repo
cloneURL := pr.Base.Repo.CloneURL
// pass our token as user always
user, err := url.Parse(cloneURL)
common.PanicOnError(err)
user.User = url.User(common.GetGiteaToken())
cloneURL = user.String()
if _, err := os.Stat(path.Join(git.GetPath(), pr.Head.Sha)); os.IsNotExist(err) {
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", cloneURL, pr.Head.Sha))
common.PanicOnError(git.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.MergeBase))
@@ -263,7 +266,7 @@ func ProcessRepoBuildStatus(results, ref []*common.PackageBuildStatus) (status B
return BuildStatusSummarySuccess, SomeSuccess
}
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string) (*common.ProjectMeta, error) {
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
common.LogDebug("repo content fetching ...")
err := FetchPrGit(git, pr)
if err != nil {
@@ -289,7 +292,32 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
}
meta, err := ObsClient.GetProjectMeta(buildPrj)
// find modified directories and assume they are packages
// TODO: use _manifest for this here
headDirectories, err := git.GitDirectoryList(dir, pr.Head.Sha)
if err != nil {
return nil, err
}
baseDirectories, err := git.GitDirectoryList(dir, pr.MergeBase)
if err != nil {
return nil, err
}
for pkg, headOid := range headDirectories {
if baseOid, exists := baseDirectories[pkg]; !exists || baseOid != headOid {
modifiedOrNew = append(modifiedOrNew, pkg)
}
}
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
if err == nil {
// success, so we use that staging master project as our build project
buildPrj = stagingMasterPrj
} else {
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
meta, err = ObsClient.GetProjectMeta(buildPrj)
}
if err != nil {
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
return nil, err
@@ -314,13 +342,13 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
urlPkg = append(urlPkg, "onlybuild="+url.QueryEscape(pkg))
}
meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
if len(meta.ScmSync) >= 65535 {
return nil, errors.New("Reached max amount of package changes per request")
}
meta.Title = fmt.Sprintf("PR#%d to %s", pr.Index, pr.Base.Name)
// QE wants it published ... also we should not hardcode it here, since
// it is configurable via the :PullRequest project
// meta.PublicFlags = common.Flags{Contents: "<disable/>"}
meta.Groups = nil
meta.Persons = nil
// Untouched content are flags and involved users. These can be configured
// via the staging project.
// set paths to parent project
for idx, r := range meta.Repositories {
@@ -414,7 +442,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
var state RequestModification = RequestModificationSourceChanged
if meta == nil {
// new build
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject)
common.LogDebug(" Staging master:", config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
if err != nil {
return RequestModificationNoChange, err
}
@@ -428,6 +457,8 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
} else {
err = ObsClient.SetProjectMeta(meta)
if err != nil {
x, _ := xml.MarshalIndent(meta, "", " ")
common.LogDebug(" meta:", string(x))
common.LogError("cannot create meta project:", err)
return RequestModificationNoChange, err
}
@@ -584,7 +615,7 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
}
if !pr.HasMerged && time.Since(time.Time(pr.Closed)) < time.Duration(config.CleanupDelay)*time.Hour {
common.LogInfo("Cooldown period for cleanup of", thread.URL)
common.LogInfo("Cooldown period for cleanup of", thread.Subject.HTMLURL)
return false
}
@@ -622,6 +653,14 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
return false // cleaned up now, but the cleanup was not aleady done
}
// SetStatus pushes a commit status to Gitea for the given commit hash.
// A failure is logged and the error is returned to the caller.
func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.CommitStatus) error {
	if _, err := gitea.SetCommitStatus(org, repo, hash, status); err != nil {
		common.LogError(err)
		return err
	}
	return nil
}
func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
dir, err := os.MkdirTemp(os.TempDir(), BotName)
common.PanicOnError(err)
@@ -684,6 +723,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
return true, err
}
if stagingConfig.ObsProject == "" {
@@ -696,7 +736,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
if err != nil {
if err != nil || meta == nil {
common.LogError("Cannot find reference project meta:", stagingConfig.ObsProject, err)
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot fetch reference project meta")
@@ -757,23 +797,28 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogDebug(" # head submodules:", len(headSubmodules))
common.LogDebug(" # base submodules:", len(baseSubmodules))
modifiedOrNew := make([]string, 0, 16)
modifiedPackages := make([]string, 0, 16)
newPackages := make([]string, 0, 16)
if !stagingConfig.RebuildAll {
for pkg, headOid := range headSubmodules {
if baseOid, exists := baseSubmodules[pkg]; !exists || baseOid != headOid {
modifiedOrNew = append(modifiedOrNew, pkg)
if exists {
modifiedPackages = append(modifiedPackages, pkg)
} else {
newPackages = append(newPackages, pkg)
}
common.LogDebug(pkg, ":", baseOid, "->", headOid)
}
}
}
if len(modifiedOrNew) == 0 {
if len(modifiedPackages) == 0 && len(newPackages) == 0 {
rebuild_all := false || stagingConfig.RebuildAll
reviews, err := gitea.GetPullRequestReviews(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
common.LogDebug("num reviews:", len(reviews))
if err == nil {
rebuild_rx := regexp.MustCompile("^@autogits_obs_staging_bot\\s*:\\s*(re)?build\\s*all$")
rebuild_rx := regexp.MustCompile("^@autogits_obs_staging_bot\\s*:?\\s*(re)?build\\s*all$")
done:
for _, r := range reviews {
for _, l := range common.SplitLines(r.Body) {
@@ -826,6 +871,22 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
TargetURL: ObsWebHost + "/project/show/" + stagingProject,
}
if err != nil {
msg := "Unable to setup stage project " + stagingConfig.ObsProject
status.Status = common.CommitStatus_Fail
common.LogError(msg)
if !IsDryRun {
SetStatus(gitea, org, repo, pr.Head.Sha, status)
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, msg)
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
return false, nil
}
msg := "Changed source updated for build"
if change == RequestModificationProjectCreated {
msg = "Build is started in " + ObsWebHost + "/project/show/" +
@@ -834,8 +895,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
if len(stagingConfig.QA) > 0 {
msg = msg + "\nAdditional QA builds: \n"
}
gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
SetStatus(gitea, org, repo, pr.Head.Sha, status)
for _, setup := range stagingConfig.QA {
CreateQASubProject(stagingConfig, git, gitea, pr,
stagingProject,
@@ -849,42 +909,44 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
gitea.AddComment(pr, msg)
}
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedOrNew...)
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedPackages...)
if err != nil {
common.LogError("failed fetching ref project status for", stagingConfig.ObsProject, ":", err)
}
stagingResult, err := ObsClient.BuildStatus(stagingProject)
if err != nil {
common.LogError("failed fetching ref project status for", stagingProject, ":", err)
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
}
buildStatus := ProcessBuildStatus(stagingResult, baseResult)
done := false
switch buildStatus {
case BuildStatusSummarySuccess:
status.Status = common.CommitStatus_Success
done = true
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, "Build successful")
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
case BuildStatusSummaryFailed:
status.Status = common.CommitStatus_Fail
done = true
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Build failed")
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
}
common.LogInfo("Build status:", buildStatus)
gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
// waiting for build results -- nothing to do
if !IsDryRun {
if err = SetStatus(gitea, org, repo, pr.Head.Sha, status); err != nil {
return false, err
}
}
return done, nil
} else if err == NonActionableReviewError || err == NoReviewsFoundError {
return true, nil
@@ -943,7 +1005,6 @@ func PollWorkNotifications(giteaUrl string) {
var ListPullNotificationsOnly bool
var GiteaUrl string
var GiteaUseSshClone bool
var ObsWebHost string
var IsDryRun bool
var ProcessPROnly string
@@ -967,7 +1028,6 @@ func main() {
ProcessPROnly := flag.String("pr", "", "Process only specific PR and ignore the rest. Use for debugging")
buildRoot := flag.String("build-root", "", "Default build location for staging projects. Default is bot's home project")
flag.StringVar(&GiteaUrl, "gitea-url", "https://src.opensuse.org", "Gitea instance")
flag.BoolVar(&GiteaUseSshClone, "use-ssh-clone", false, "enforce cloning via ssh")
obsApiHost := flag.String("obs", "https://api.opensuse.org", "API for OBS instance")
flag.StringVar(&ObsWebHost, "obs-web", "", "Web OBS instance, if not derived from the obs config")
flag.BoolVar(&IsDryRun, "dry", false, "Dry-run, don't actually create any build projects or review changes")
@@ -1001,7 +1061,7 @@ func main() {
}
if len(*ProcessPROnly) > 0 {
rx := regexp.MustCompile("^(\\w+)/(\\w+)#(\\d+)$")
rx := regexp.MustCompile("^([^/#]+)/([^/#]+)#([0-9]+)$")
m := rx.FindStringSubmatch(*ProcessPROnly)
if m == nil {
common.LogError("Cannot find any PR matches in", *ProcessPROnly)

View File

@@ -4,9 +4,17 @@ OBS Status Service
Reports the build status of an OBS service as an easy-to-produce SVG
Requests for individual build results:
/obs:project/package/repo/arch
/status/obs:project/package/repo/arch
where `repo` and `arch` are optional parameters.
Requests for project results
/obs:project
/status/obs:project
GET requests for / normally return a 404 status. If the backend Redis
server is not available, they return 500 instead.
Areas of Responsibility
@@ -23,3 +31,14 @@ Target Usage
* README.md of package git or project git
* comment section of a Gitea PR
Running
-------
Default parameters can be changed by env variables
| Environment variable | Default | Description
|---------------------------------|-----------------------------|------------
| `OBS_STATUS_SERVICE_OBS_URL` | https://build.opensuse.org | Location for creating build logs and monitor page build results
| `OBS_STATUS_SERVICE_LISTEN` | [::1]:8080 | Listening address and port
| `OBS_STATUS_SERVICE_CERT` | /run/obs-status-service.pem | Location of certificate file for service
| `OBS_STATUS_SERVICE_KEY` | /run/obs-status-service.pem | Location of key file for service

View File

@@ -19,12 +19,15 @@ package main
*/
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"html"
"io"
"log"
"maps"
"net/http"
"net/url"
"os"
"slices"
"strings"
@@ -39,52 +42,83 @@ const (
var obs *common.ObsClient
type RepoBuildCounters struct {
Repository, Arch string
Status string
BuildStatusCounter map[string]int
}
func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
if len(res) == 0 {
return nil
}
list := common.BuildResultList{
Result: res,
}
pkgs := list.GetPackageList()
package_names := list.GetPackageList()
maxLen := 0
for _, p := range pkgs {
for _, p := range package_names {
maxLen = max(maxLen, len(p))
}
width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
height := 1.5*float32(maxLen) + 30
// width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
// height := 1.5*float32(maxLen) + 30
ret := NewSvg(SvgType_Project)
ret := bytes.Buffer{}
ret.WriteString(`<svg version="2.0" width="`)
ret.WriteString(fmt.Sprint(width))
ret.WriteString(`em" height="`)
ret.WriteString(fmt.Sprint(height))
ret.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
ret.WriteString(`<defs>
<g id="f"> <!-- failed -->
<rect width="8em" height="1.5em" fill="#800" />
</g>
<g id="s"> <!--succeeded-->
<rect width="8em" height="1.5em" fill="#080" />
</g>
<g id="buidling"> <!--building-->
<rect width="8em" height="1.5em" fill="#880" />
</g>
</defs>`)
status := make([]RepoBuildCounters, len(res))
ret.WriteString(`<use href="#f" x="1em" y="2em"/>`)
ret.WriteString(`</svg>`)
return ret.Bytes()
for i, repo := range res {
status[i].Arch = repo.Arch
status[i].Repository = repo.Repository
status[i].Status = repo.Code
status[i].BuildStatusCounter = make(map[string]int)
for _, pkg := range repo.Status {
status[i].BuildStatusCounter[pkg.Code]++
}
}
slices.SortFunc(status, func(a, b RepoBuildCounters) int {
if r := strings.Compare(a.Repository, b.Repository); r != 0 {
return r
}
return strings.Compare(a.Arch, b.Arch)
})
repoName := ""
ret.ypos = 3.0
for _, repo := range status {
if repo.Repository != repoName {
repoName = repo.Repository
ret.WriteTitle(repoName)
}
ret.WriteSubtitle(repo.Arch)
statuses := slices.Sorted(maps.Keys(repo.BuildStatusCounter))
for _, status := range statuses {
ret.WriteProjectStatus(res[0].Project, repo.Repository, repo.Arch, status, repo.BuildStatusCounter[status])
}
}
return ret.GenerateSvg()
}
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
if R != nil && S != nil {
switch S.Code {
case "succeeded", "failed", "building":
return "/buildlog/" + R.Project + "/" + S.Package + "/" + R.Repository + "/" + R.Arch
return "/buildlog/" + url.PathEscape(R.Project) + "/" + url.PathEscape(S.Package) + "/" + url.PathEscape(R.Repository) + "/" + url.PathEscape(R.Arch)
}
}
return ""
}
// DeleteExceptPkg returns a predicate (suitable for slices.DeleteFunc)
// that is true for every build status entry except the package itself
// and its multibuild flavors ("pkg:flavor").
func DeleteExceptPkg(pkg string) func(*common.PackageBuildStatus) bool {
	return func(entry *common.PackageBuildStatus) bool {
		if entry.Package == pkg {
			return false // the package itself is kept
		}
		// multibuild flavors share the "pkg:" prefix and are kept too
		return !strings.HasPrefix(entry.Package, pkg+":")
	}
}
func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
// per repo, per arch status bins
repo_names := []string{}
@@ -104,7 +138,7 @@ func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
}
}
ret := NewSvg()
ret := NewSvg(SvgType_Package)
for _, pkg = range package_names {
// if len(package_names) > 1 {
ret.WriteTitle(pkg)
@@ -159,31 +193,50 @@ func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus)
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">` +
`<rect width="100%" height="100%" fill="` + fillColor + `"/>` + startTag +
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>` + endTag + `</svg>`)
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + html.EscapeString(buildStatus.Code) + `</text>` + endTag + `</svg>`)
}
// WriteJson serializes data as indented JSON onto the response writer.
// On serialization failure the response is a bare HTTP 500; otherwise a
// "size" header with the payload length is added before writing the body.
func WriteJson(data any, res http.ResponseWriter) {
	payload, err := json.MarshalIndent(data, "", " ")
	if err != nil {
		res.WriteHeader(500)
		return
	}
	res.Header().Add("size", fmt.Sprint(len(payload)))
	res.Write(payload)
}
var ObsUrl *string
func main() {
cert := flag.String("cert-file", "", "TLS certificates file")
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
obsUrlDef := os.Getenv("OBS_STATUS_SERVICE_OBS_URL")
if len(obsUrlDef) == 0 {
obsUrlDef = "https://build.opensuse.org"
}
listenDef := os.Getenv("OBS_STATUS_SERVICE_LISTEN")
if len(listenDef) == 0 {
listenDef = "[::1]:8080"
}
certDef := os.Getenv("OBS_STATUS_SERVICE_CERT")
if len(certDef) == 0 {
certDef = "/run/obs-status-service.pem"
}
keyDef := os.Getenv("OBS_STATUS_SERVICE_KEY")
if len(keyDef) == 0 {
keyDef = certDef
}
cert := flag.String("cert-file", certDef, "TLS certificates file")
key := flag.String("key-file", keyDef, "Private key for the TLS certificate")
listen := flag.String("listen", listenDef, "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
obsUrl := flag.String("obs-url", "https://api.opensuse.org", "OBS API endpoint for package buildlog information")
ObsUrl = flag.String("obs-url", obsUrlDef, "OBS API endpoint for package buildlog information")
debug := flag.Bool("debug", false, "Enable debug logging")
// RabbitMQHost := flag.String("rabbit-mq", "amqps://rabbit.opensuse.org", "RabbitMQ message bus server")
// Topic := flag.String("topic", "opensuse.obs", "RabbitMQ topic prefix")
flag.Parse()
if *debug {
common.SetLoggingLevel(common.LogLevelDebug)
}
// common.PanicOnError(common.RequireObsSecretToken())
var err error
if obs, err = common.NewObsClient(*obsUrl); err != nil {
log.Fatal(err)
}
if redisUrl := os.Getenv("REDIS"); len(redisUrl) > 0 {
RedisConnect(redisUrl)
} else {
@@ -191,81 +244,156 @@ func main() {
return
}
var rescanRepoError error
go func() {
for {
if err := RescanRepositories(); err != nil {
common.LogError("Failed to rescan repositories.", err)
if rescanRepoError = RescanRepositories(); rescanRepoError != nil {
common.LogError("Failed to rescan repositories.", rescanRepoError)
}
time.Sleep(time.Minute * 5)
}
}()
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
obsPrj := req.PathValue("Project")
common.LogInfo(" request: GET /status/" + obsPrj)
res.WriteHeader(http.StatusBadRequest)
http.HandleFunc("GET /", func(res http.ResponseWriter, req *http.Request) {
if rescanRepoError != nil {
res.WriteHeader(500)
return
}
res.WriteHeader(404)
res.Write([]byte("404 page not found\n"))
})
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
common.LogInfo(" GET /status/" + obsPrj, "[" + mime.MimeType() + "]")
status := FindAndUpdateProjectResults(obsPrj)
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
res.Header().Add("content-type", mime.MimeHeader)
if mime.IsSvg() {
svg := ProjectStatusSummarySvg(status)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
}
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" request: GET /status/" + obsPrj + "/" + obsPkg)
common.LogInfo(" GET /status/" + obsPrj + "/" + obsPkg, "[" + mime.MimeType() + "]")
status := FindAndUpdateRepoResults(obsPrj, repo)
status := slices.Clone(FindAndUpdateProjectResults(obsPrj))
for _, s := range status {
s.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
}
if len(status) == 0 {
res.WriteHeader(404)
return
}
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", "image/svg+xml")
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
res.Header().Add("content-type", mime.MimeHeader)
if mime.IsSvg() {
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
}
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" GET /status/" + obsPrj + "/" + obsPkg, "[" + mime.MimeType() + "]")
status := slices.Clone(FindAndUpdateRepoResults(obsPrj, repo))
for _, s := range status {
s.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
}
if len(status) == 0 {
res.WriteHeader(404)
return
}
if mime.IsSvg() {
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", mime.MimeHeader)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
}
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
common.LogInfo("GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch)
common.LogInfo(" GET /status/" + prj + "/" + pkg + "/" + repo + "/" + arch, "[" + mime.MimeType() + "]")
res.Header().Add("content-type", "image/svg+xml")
for _, r := range FindAndUpdateProjectResults(prj) {
if r.Arch == arch && r.Repository == repo {
res.Header().Add("content-type", mime.MimeHeader)
for _, r := range FindAndUpdateRepoResults(prj, repo) {
if r.Arch == arch {
if idx, found := slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res.Write(BuildStatusSvg(r, r.Status[idx]))
status := r.Status[idx]
if mime.IsSvg() {
res.Write(BuildStatusSvg(r, status))
} else if mime.IsJson() {
WriteJson(status, res)
}
return
}
break
}
}
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
if mime.IsSvg() {
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
}
})
http.HandleFunc("GET /search", func(res http.ResponseWriter, req *http.Request) {
common.LogInfo("GET /search?" + req.URL.RawQuery)
queries := req.URL.Query()
if !queries.Has("q") {
res.WriteHeader(400)
return
}
names := queries["q"]
if len(names) != 1 {
res.WriteHeader(400)
return
}
packages := FindPackages(names[0])
data, err := json.MarshalIndent(packages, "", " ")
if err != nil {
res.WriteHeader(500)
common.LogError("Error in marshalling data.", err)
return
}
res.Write(data)
res.Header().Add("content-type", "application/json")
res.WriteHeader(200)
})
http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
res.Header().Add("location", "https://build.opensuse.org/package/live_build_log/"+prj+"/"+pkg+"/"+repo+"/"+arch)
res.Header().Add("location", *ObsUrl+"/package/live_build_log/"+url.PathEscape(prj)+"/"+url.PathEscape(pkg)+"/"+url.PathEscape(repo)+"/"+url.PathEscape(arch))
res.WriteHeader(307)
return

View File

@@ -8,11 +8,13 @@ import (
)
func TestStatusSvg(t *testing.T) {
ObsUrl = &[]string{"http://nothing.is.here"}[0]
os.WriteFile("teststatus.svg", BuildStatusSvg(nil, &common.PackageBuildStatus{
Package: "foo",
Code: "succeeded",
Details: "more success here",
}), 0o777)
data := []*common.BuildResult{
{
Project: "project:foo",

View File

@@ -0,0 +1,54 @@
package main
import (
"net/http"
"strings"
)
// MimeHeader records the response content type negotiated from a
// request's Accept header.
type MimeHeader struct {
	MimeHeader string
}

const (
	JsonMime = "application/json"
	SvgMime  = "image/svg+xml"
)

// AcceptedStatusMimes lists the supported response types in order of
// preference; SVG wins when a request accepts several of them.
var AcceptedStatusMimes = []string{
	SvgMime,
	JsonMime,
}

// ParseMimeHeader picks the response content type from the request's
// Accept header values, defaulting to SVG when nothing matches.
func ParseMimeHeader(req *http.Request) *MimeHeader {
	result := &MimeHeader{MimeHeader: SvgMime}
	for _, accepted := range req.Header.Values("Accept") {
		for _, known := range AcceptedStatusMimes {
			// substring match so "application/json; q=0.9" works too
			if strings.Contains(accepted, known) {
				result.MimeHeader = known
				return result
			}
		}
	}
	return result
}

// IsJson reports whether JSON output was negotiated.
func (m *MimeHeader) IsJson() bool {
	return m.MimeHeader == JsonMime
}

// IsSvg reports whether SVG output was negotiated.
func (m *MimeHeader) IsSvg() bool {
	return m.MimeHeader == SvgMime
}

// MimeType returns the negotiated type as a string; anything that is
// not JSON is reported as the SVG default.
func (m *MimeHeader) MimeType() string {
	if m.IsJson() {
		return JsonMime
	}
	return SvgMime
}

View File

@@ -29,13 +29,15 @@ func UpdateResults(r *common.BuildResult) {
RepoStatusLock.Lock()
defer RepoStatusLock.Unlock()
updateResultsWithoutLocking(r)
}
func updateResultsWithoutLocking(r *common.BuildResult) {
key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
common.LogDebug(" + Updating", key)
data, err := redisClient.HGetAll(context.Background(), key).Result()
if err != nil {
common.LogError("Failed fetching build results for", key, err)
}
common.LogDebug(" + Update size", len(data))
reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
for _, pkg := range r.Status {
@@ -88,6 +90,10 @@ func FindProjectResults(project string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
return FindProjectResultsNoLock(project)
}
func FindProjectResultsNoLock(project string) []*common.BuildResult {
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project {
@@ -101,6 +107,10 @@ func FindRepoResults(project, repo string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
return FindRepoResultsNoLock(project, repo)
}
func FindRepoResultsNoLock(project, repo string) []*common.BuildResult {
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project, Repository: repo}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project && RepoStatus[idx].Repository == repo {
@@ -110,6 +120,31 @@ func FindRepoResults(project, repo string) []*common.BuildResult {
return ret
}
// FindPackages returns "project/pkg" entries for repositories that
// contain the given package, holding the repo-status read lock for the
// duration of the search.
func FindPackages(pkg string) []string {
	RepoStatusLock.RLock()
	defer RepoStatusLock.RUnlock()

	return FindPackagesNoLock(pkg)
}
// FindPackagesNoLock scans RepoStatus for repositories containing pkg
// and returns sorted, de-duplicated "project/pkg" entries, capped at
// 100 results. Caller must already hold the repo-status lock.
func FindPackagesNoLock(pkg string) []string {
	const limit = 100
	results := make([]string, 0, limit)
	for _, repo := range RepoStatus {
		for _, st := range repo.Status {
			if st.Package != pkg {
				continue
			}
			entry := repo.Project + "/" + pkg
			// keep results sorted and unique via binary search insert
			pos, dup := slices.BinarySearch(results, entry)
			if dup {
				continue
			}
			results = slices.Insert(results, pos, entry)
			if len(results) >= limit {
				return results
			}
		}
	}
	return results
}
func FindAndUpdateProjectResults(project string) []*common.BuildResult {
res := FindProjectResults(project)
wg := &sync.WaitGroup{}
@@ -161,6 +196,8 @@ func RescanRepositories() error {
RepoStatusLock.Unlock()
var count int
projectsLooked := make([]string, 0, 10000)
for {
var data []string
data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
@@ -169,6 +206,7 @@ func RescanRepositories() error {
return err
}
wg := &sync.WaitGroup{}
RepoStatusLock.Lock()
for _, repo := range data {
r := strings.Split(repo, "/")
@@ -180,14 +218,28 @@ func RescanRepositories() error {
Repository: r[1],
Arch: r[2],
}
if pos, found := slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
var pos int
var found bool
if pos, found = slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
RepoStatus[pos].Dirty = true
} else {
d.Dirty = true
RepoStatus = slices.Insert(RepoStatus, pos, d)
count++
}
// fetch all keys, one per non-maintenance/non-home: projects, for package search
if idx, found := slices.BinarySearch(projectsLooked, d.Project); !found && !strings.Contains(d.Project, ":Maintenance:") && (len(d.Project) < 5 || d.Project[0:5] != "home:") {
projectsLooked = slices.Insert(projectsLooked, idx, d.Project)
wg.Add(1)
go func(r *common.BuildResult) {
updateResultsWithoutLocking(r)
wg.Done()
}(RepoStatus[pos])
}
}
wg.Wait()
RepoStatusLock.Unlock()
if cursor == 0 {

View File

@@ -3,6 +3,8 @@ package main
import (
"bytes"
"fmt"
"html"
"net/url"
"slices"
)
@@ -12,65 +14,78 @@ type SvgWriter struct {
out bytes.Buffer
}
func NewSvg() *SvgWriter {
// SVG document flavors accepted by NewSvg: SvgType_Package emits the full
// per-status icon palette in <defs> for package build-result rows, while
// SvgType_Project emits an empty <defs> for project-level summaries.
const (
	SvgType_Package = iota
	SvgType_Project
)
func NewSvg(SvgType int) *SvgWriter {
svg := &SvgWriter{}
svg.header = []byte(`<svg version="2.0" overflow="auto" width="40ex" height="`)
svg.out.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
switch SvgType {
case SvgType_Package:
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
case SvgType_Project:
svg.out.WriteString(`<defs>
</defs>`)
}
return svg
}
func (svg *SvgWriter) WriteTitle(title string) {
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + title + "</text>")
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + html.EscapeString(title) + "</text>")
svg.ypos += 2.5
}
func (svg *SvgWriter) WriteSubtitle(subtitle string) {
svg.out.WriteString(`<use href="#repotitle" y="` + fmt.Sprint(svg.ypos-2) + `em"/>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + subtitle + `</text>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + html.EscapeString(subtitle) + `</text>`)
svg.ypos += 2
}
@@ -97,23 +112,38 @@ func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
return "un"
}
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + arch + `</text>`)
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + html.EscapeString(arch) + `</text>`)
svg.out.WriteString(`<g>`)
if len(loglink) > 0 {
svg.out.WriteString(`<a href="` + loglink + `" target="_blank" rel="noopener">`)
u, err := url.Parse(loglink)
if err == nil {
svg.out.WriteString(`<a href="` + u.String() + `" target="_blank" rel="noopener">`)
}
}
svg.out.WriteString(`<use href="#` + StatusToSVG(status) + `" x="20ex" y="` + fmt.Sprint(svg.ypos-1.7) + `em"/>`)
if len(loglink) > 0 {
svg.out.WriteString(`</a>`)
}
if len(detail) > 0 {
svg.out.WriteString(`<title>` + fmt.Sprint(detail) + "</title>")
svg.out.WriteString(`<title>` + html.EscapeString(detail) + "</title>")
}
svg.out.WriteString("</g>\n")
svg.ypos += 2
}
// WriteProjectStatus appends one "status: count" row to the SVG, linking to
// the OBS project monitor filtered down to this repo/arch/status combination.
// Rows advance the vertical cursor by 2em each.
func (svg *SvgWriter) WriteProjectStatus(project, repo, arch, status string, count int) {
	// NOTE: the literal "&amp;" inside the URL appears intentional — the link
	// is embedded in an SVG (XML) href attribute, where ampersands must be
	// XML-escaped. url.Parse keeps the resulting raw query as-is, so the
	// escaped form survives into u.String() — confirm if the monitor page
	// tolerates the "amp;"-prefixed parameter names after browser unescaping.
	u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&amp;" + url.QueryEscape(status) + "=1&amp;arch_" + url.QueryEscape(arch) + "=1&amp;repo_" + url.QueryEscape(repo) + "=1")
	if err != nil {
		// Unparsable URL: skip the row rather than emit a broken link.
		return
	}
	svg.out.WriteString(`<g><a href="` + u.String() + `" target="_blank" rel="noopener">` + "\n" +
		`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-0.6) + "em\">\n" +
		html.EscapeString(status+": ") + fmt.Sprint(count) + "</text></a></g>\n")
	svg.ypos += 2
}
// GenerateSvg assembles the final SVG document: the stored header, the
// accumulated document height (ypos, in em), the buffered body, and the
// closing </svg> tag.
func (svg *SvgWriter) GenerateSvg() []byte {
	height := []byte(fmt.Sprint(svg.ypos))
	footer := []byte("</svg>")
	return slices.Concat(svg.header, height, svg.out.Bytes(), footer)
}

24
reparent-bot/README.md Normal file
View File

@@ -0,0 +1,24 @@
Reparent Bot
============
Enables promoting new parents of repositories, as special forks, into
pool and other projects.
Areas of Responsibility
-------------------------
* monitor issues for Add packages
+ issue creator *must be* owner of the repo, OR
+ repository must not be a fork
* assign organization Owner to review request
* reparent the repository and create a PR
* remove non-accepted repositories from /pool, if no other
branches are relevant here
Target Usage
------------
* devel and released products

View File

@@ -5,7 +5,7 @@ After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/gitea-events-rabbitmq-publisher
EnvironmentFile=-/etc/sysconfig/gitea-events-rabbitmq-publisher.env
EnvironmentFile=-/etc/default/gitea-events-rabbitmq-publisher.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict

View File

@@ -0,0 +1,16 @@
[Unit]
Description=Staging bot for project git PRs in OBS
After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/obs-staging-bot
EnvironmentFile=-/etc/default/obs-staging-bot.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,15 @@
[Unit]
Description=OBS build status as SVG service
After=network-online.target
[Service]
Type=exec
Restart=on-failure
ExecStart=/usr/bin/obs-status-service
EnvironmentFile=-/etc/default/obs-status-service.env
DynamicUser=yes
ProtectSystem=strict
[Install]
WantedBy=multi-user.target

View File

@@ -34,6 +34,7 @@ JSON
* _ManualMergeProject_: (true, false) only merge if "merge ok" by project maintainers or reviewers
* _ReviewRequired_: (true, false) ignores that submitter is a maintainer and require a review from other maintainer IFF available
* _NoProjectGitPR_: (true, false) do not create PrjGit PRs, but still process reviews, etc.
* _Permissions_: permissions and associated accounts/groups. See below.
NOTE: `-rm`, `-removed`, `-deleted` are all removed suffixes used to indicate current branch is a placeholder for previously existing package. These branches will be ignored by the bot, and if default, the package will be removed and will not be added to the project.
example:
@@ -89,3 +90,22 @@ example:
"": ["reviewer3", "reviewer4"]
}
Permissions
-----------
Permissions are extra permissions assigned to groups or individuals. Groups must be defined in
the `workflow.config`.
```
Permissions: []{
Permission: "force-push" | "release-engineering"
Members: []string
}
```
* `force-push` -- allows to issue force-push to the bot to merge even without reviews
* `release-engineering` -- merge, split package PRs and merge additional commits
NOTE: Project Maintainers have these permissions automatically.

View File

@@ -4,11 +4,13 @@ package main
import (
"encoding/json"
"errors"
"fmt"
"path"
"runtime/debug"
"slices"
"strings"
"time"
"github.com/opentracing/opentracing-go/log"
"src.opensuse.org/autogits/common"
@@ -266,6 +268,7 @@ func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch
return nil
}
var updatePrjGitError_requeue error = errors.New("Commits do not match. Requeing after 5 seconds.")
func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
_, _, PrjGitBranch := prset.Config.GetPrjGit()
PrjGitPR, err := prset.GetPrjGitPR()
@@ -283,7 +286,7 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
PrjGit := PrjGitPR.PR.Base.Repo
prjGitPRbranch := PrjGitPR.PR.Head.Name
if strings.Contains(prjGitPRbranch, "/") {
if PrjGitPR.PR.Base.RepoID != PrjGitPR.PR.Head.RepoID {
PrjGitPR.RemoteName, err = git.GitClone(common.DefaultGitPrj, "", PrjGit.SSHURL)
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitPR.PR.Head.Sha)
git.GitExecOrPanic(common.DefaultGitPrj, "checkout", PrjGitPR.PR.Head.Sha)
@@ -317,18 +320,28 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
}
PrjGitTitle, PrjGitBody := PrjGitDescription(prset)
if PrjGitPR.PR.Title != PrjGitTitle || PrjGitPR.PR.Body != PrjGitBody {
common.LogDebug("New title:", PrjGitTitle)
common.LogDebug(PrjGitBody)
if PrjGitPR.PR.User.UserName == CurrentUser.UserName {
if PrjGitPR.PR.Title != PrjGitTitle || PrjGitPR.PR.Body != PrjGitBody {
common.LogDebug("New title:", PrjGitTitle)
common.LogDebug(PrjGitBody)
}
} else {
// TODO: find our first comment in timeline
}
if !common.IsDryRun {
if headCommit != PrjGitPR.PR.Head.Sha {
common.LogError("HeadCommit:", headCommit, "is not what's expected from the PR:", PrjGitPR.PR.Head.Ref, " Requeing.")
return updatePrjGitError_requeue
}
if headCommit != newHeadCommit {
params := []string{"push", PrjGitPR.RemoteName, "+HEAD:" + prjGitPRbranch}
if forcePush {
params = slices.Insert(params, 1, "-f")
}
common.PanicOnError(git.GitExec(common.DefaultGitPrj, params...))
PrjGitPR.PR.Head.Sha = newHeadCommit
}
// update PR
@@ -367,13 +380,7 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
prjGitPRbranch := prGitBranchNameForPR(prRepo, prNo)
prjGitPR, err := prset.GetPrjGitPR()
if err == common.PRSet_PrjGitMissing {
if config.NoProjectGitPR {
common.LogDebug("No Project Git PR yet and we are set to not create it.")
return nil
}
common.LogDebug("Missing PrjGit. Need to create one under branch", prjGitPRbranch)
if err = pr.CreatePRjGitPR(prjGitPRbranch, prset); err != nil {
@@ -565,6 +572,7 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
type RequestProcessor struct {
configuredRepos map[string][]*common.AutogitConfig
recursive int
}
func ProcesPullRequest(pr *models.PullRequest, configs []*common.AutogitConfig) error {
@@ -583,16 +591,22 @@ func ProcesPullRequest(pr *models.PullRequest, configs []*common.AutogitConfig)
return PRProcessor.Process(pr)
}
func (w *RequestProcessor) ProcessFunc(request *common.Request) error {
func (w *RequestProcessor) ProcessFunc(request *common.Request) (err error) {
defer func() {
if r := recover(); r != nil {
common.LogInfo("panic cought --- recovered")
common.LogError(string(debug.Stack()))
}
w.recursive--
}()
w.recursive++
if w.recursive > 3 {
common.LogError("Recursion limit reached... something is wrong with this PR?")
return nil
}
var pr *models.PullRequest
var err error
if req, ok := request.Data.(*common.PullRequestWebhookEvent); ok {
pr, err = Gitea.GetPullRequest(req.Pull_Request.Base.Repo.Owner.Username, req.Pull_Request.Base.Repo.Name, req.Pull_Request.Number)
if err != nil {
@@ -614,5 +628,9 @@ func (w *RequestProcessor) ProcessFunc(request *common.Request) error {
if !ok {
common.LogError("*** Cannot find config for org:", pr.Base.Repo.Owner.UserName)
}
return ProcesPullRequest(pr, configs)
if err = ProcesPullRequest(pr, configs); err == updatePrjGitError_requeue {
time.Sleep(time.Second * 5)
return w.ProcessFunc(request)
}
return err
}