1 commit

SHA256: 3d8671a7fe
Message: WIP: conflict resolution
Date: 2025-07-15 23:24:50 +02:00
738 changed files with 1719 additions and 160284 deletions


@@ -1,34 +0,0 @@
name: go-generate-check
on:
push:
branches: ['main']
paths:
- '**.go'
- '**.mod'
- '**.sum'
pull_request:
paths:
- '**.go'
- '**.mod'
- '**.sum'
workflow_dispatch:
jobs:
go-generate-check:
name: go-generate-check
container:
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
steps:
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
- run: git fetch origin ${{ gitea.ref }}
- run: git checkout FETCH_HEAD
- run: go generate -C common
- run: go generate -C workflow-pr
- run: go generate -C workflow-pr/interfaces
- run: git add -N .; git diff
- run: |
status=$(git status --short)
if [[ -n "$status" ]]; then
echo -e "$status"
echo "Please commit the differences from running: go generate"
false
fi
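
The check above simply fails when `go generate` leaves the working tree dirty. A local equivalent can be handy before pushing; the following is only a sketch (a hypothetical helper, not part of the repository) and assumes it is run from the repository root with go and git on PATH:

// checkgenerate.go -- mirrors the go-generate-check job: run go generate
// in the listed module directories and fail if the working tree becomes dirty.
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func run(name string, args ...string) ([]byte, error) {
	cmd := exec.Command(name, args...)
	cmd.Stderr = os.Stderr
	return cmd.Output()
}

func main() {
	for _, dir := range []string{"common", "workflow-pr", "workflow-pr/interfaces"} {
		if _, err := run("go", "generate", "-C", dir); err != nil {
			fmt.Fprintln(os.Stderr, "go generate failed in", dir, ":", err)
			os.Exit(1)
		}
	}
	status, err := run("git", "status", "--short")
	if err != nil {
		fmt.Fprintln(os.Stderr, "git status failed:", err)
		os.Exit(1)
	}
	if len(status) > 0 {
		os.Stdout.Write(status)
		fmt.Println("Please commit the differences from running: go generate")
		os.Exit(1)
	}
}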


@@ -1,25 +0,0 @@
name: go-generate-push
on:
workflow_dispatch:
jobs:
go-generate-push:
name: go-generate-push
container:
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
steps:
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
- run: git fetch origin ${{ gitea.ref }}
- run: git checkout FETCH_HEAD
- run: go generate -C common
- run: go generate -C workflow-pr
- run: go generate -C workflow-pr/interfaces
- run: |
host=${{ gitea.server_url }}
host=${host#https://}
echo $host
git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
git config user.name "Gitea Actions"
git config user.email "gitea_noreply@opensuse.org"
- run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go generate"; git push origin HEAD:${{ gitea.ref }} )'
- run: git log -p FETCH_HEAD...HEAD
- run: git log --numstat FETCH_HEAD...HEAD


@@ -1,33 +0,0 @@
name: go-vendor-check
on:
push:
branches: ['main']
paths:
- '**.mod'
- '**.sum'
pull_request:
paths:
- '**.mod'
- '**.sum'
workflow_dispatch:
jobs:
go-generate-check:
name: go-vendor-check
container:
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
steps:
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
- run: git fetch origin ${{ gitea.ref }}
- run: git checkout FETCH_HEAD
- run: go mod download
- run: go mod vendor
- run: go mod verify
- run: git add -N .; git diff
- run: go mod tidy -diff || true
- run: |
status=$(git status --short)
if [[ -n "$status" ]]; then
echo -e "$status"
echo "Please commit the differences from running: go generate"
false
fi


@@ -1,26 +0,0 @@
name: go-generate-push
on:
workflow_dispatch:
jobs:
go-generate-push:
name: go-generate-push
container:
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
steps:
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
- run: git fetch origin ${{ gitea.ref }}
- run: git checkout FETCH_HEAD
- run: go mod download
- run: go mod vendor
- run: go mod verify
- run: |
host=${{ gitea.server_url }}
host=${host#https://}
echo $host
git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
git config user.name "Gitea Actions"
git config user.email "gitea_noreply@opensuse.org"
- run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go mod vendor"; git push origin HEAD:${{ gitea.ref }} )'
- run: go mod tidy -diff || true
- run: git log -p FETCH_HEAD...HEAD
- run: git log --numstat FETCH_HEAD...HEAD

.gitignore (vendored): 7 lines changed

@@ -1,4 +1,5 @@
mock
node_modules
*.obscpio
autogits-tmp.tar.zst
*.osc
*.conf
utils/gitmodules-automerge/gitmodules-automerge
utils/hujson/hujson


@@ -5,15 +5,11 @@ The bots that drive Git Workflow for package management
* devel-importer -- helper to import an OBS devel project into a Gitea organization
* gitea-events-rabbitmq-publisher -- takes all events from a Gitea organization (webhook) and publishes them on a RabbitMQ instance
* gitea-status-proxy -- allows bots without code owner permission to set Gitea's commit status
* group-review -- group review proxy
* hujson -- translates JWCC (json with commas and comments) to Standard JSON
* obs-forward-bot -- forwards PR as OBS sr (TODO)
* obs-staging-bot -- build bot for a PR
* obs-status-service -- report build status of an OBS project as an SVG
* workflow-pr -- keeps PR to _ObsPrj consistent with a PR to a package update
* workflow-direct -- update _ObsPrj based on direct pushes and repo creations/removals from organization
* staging-utils -- review tooling for PR (TODO)
* staging-utils -- review tooling for PR
- list PR
- merge PR
- split PR
@@ -23,18 +19,7 @@ The bots that drive Git Workflow for package management
Bugs
----
Report bugs to issue tracker at https://src.opensuse.org/git-workflow/autogits
Report bugs to issue tracker at https://src.opensuse.org/adamm/autogits
Build Status
------------
Devel project build status (`main` branch):
![devel:Factory:git-workflow](https://br.opensuse.org/status/devel:Factory:git-workflow/autogits)
`staging` branch build status:
![Staging Build Status](https://br.opensuse.org/status/home:adamm:autogits/autogits)

_service (new file, 15 lines)

@@ -0,0 +1,15 @@
<services>
<!-- workaround, go_modules needs a tar and obs_scm doesn't take file://. -->
<service name="roast" mode="manual">
<param name="target">.</param>
<param name="reproducible">true</param>
<param name="outfile">autogits-tmp.tar.zst</param>
<param name="exclude">autogits-tmp.tar.zst</param>
</service>
<service name="go_modules" mode="manual">
<param name="basename">./</param>
<param name="compression">zst</param>
<param name="vendorname">vendor</param>
</service>
</services>


@@ -22,9 +22,10 @@ Release: 0
Summary: GitWorkflow utilities
License: GPL-2.0-or-later
URL: https://src.opensuse.org/adamm/autogits
BuildRequires: git
Source1: vendor.tar.zst
BuildRequires: golang-packaging
BuildRequires: systemd-rpm-macros
BuildRequires: go
BuildRequires: zstd
%{?systemd_ordering}
%description
@@ -32,244 +33,160 @@ Git Workflow tooling and utilities enabling automated handling of OBS projects
as git repositories
%package devel-importer
Summary: Imports devel projects from obs to git
%description -n autogits-devel-importer
Command-line tool to import devel projects from obs to git
%package doc
Summary: Common documentation files
%description -n autogits-doc
Common documentation files
%package gitea-events-rabbitmq-publisher
%package -n gitea-events-rabbitmq-publisher
Summary: Publishes Gitea webhook data via RabbitMQ
%description gitea-events-rabbitmq-publisher
%description -n gitea-events-rabbitmq-publisher
Listens on an HTTP socket and publishes Gitea events on a RabbitMQ instance
with a topic
<scope>.src.$organization.$webhook_type.[$webhook_action_type]
%package gitea-status-proxy
Summary: gitea-status-proxy
%package -n doc
Summary: Common documentation files
%description gitea-status-proxy
%description -n doc
Common documentation files
%package group-review
%package -n devel-importer
Summary: Imports devel projects from obs to git
%description -n devel-importer
Command-line tool to import devel projects from obs to git
%package -n group-review
Summary: Reviews of groups defined in ProjectGit
%description group-review
%description -n group-review
Is used to handle reviews associated with groups defined in the
ProjectGit.
%package obs-forward-bot
Summary: obs-forward-bot
%description obs-forward-bot
%package obs-staging-bot
%package -n obs-staging-bot
Summary: Build a PR against a ProjectGit, if review is requested
%description obs-staging-bot
%description -n obs-staging-bot
Build a PR against a ProjectGit, if review is requested.
%package obs-status-service
%package -n obs-status-service
Summary: Reports build status of an OBS service as an easy-to-produce SVG
%description obs-status-service
%description -n obs-status-service
Reports build status of an OBS service as an easy-to-produce SVG
%package utils
Summary: HuJSON to JSON parser
Provides: hujson
Provides: /usr/bin/hujson
%description utils
HuJSON to JSON parser, using stdin -> stdout pipe
gitmodules-automerge fixes conflicts in .gitmodules during merges
%package workflow-direct
%package -n workflow-direct
Summary: Keep ProjectGit in sync for a devel project
Requires: openssh-clients
Requires: git-core
%description workflow-direct
%description -n workflow-direct
Keep ProjectGit in sync with packages in the organization of a devel project
%package workflow-pr
%package -n workflow-pr
Summary: Keeps ProjectGit PR in-sync with a PackageGit PR
Requires: openssh-clients
Requires: git-core
%description workflow-pr
%description -n workflow-pr
Keeps ProjectGit PR in-sync with a PackageGit PR
%prep
cp -r /home/abuild/rpmbuild/SOURCES/* ./
tar x --zstd -f %{SOURCE1}
%build
go build \
-C devel-importer \
-buildmode=pie
go build \
-C utils/hujson \
-buildmode=pie
go build \
-C utils/gitmodules-automerge \
-buildmode=pie
go build \
-C gitea-events-rabbitmq-publisher \
-mod=vendor \
-buildmode=pie
go build \
-C gitea_status_proxy \
-C devel-importer \
-mod=vendor \
-buildmode=pie
go build \
-C group-review \
-buildmode=pie
go build \
-C obs-forward-bot \
-mod=vendor \
-buildmode=pie
go build \
-C obs-staging-bot \
-mod=vendor \
-buildmode=pie
go build \
-C obs-status-service \
-mod=vendor \
-buildmode=pie
go build \
-C workflow-direct \
-buildmode=pie
go build \
-C workflow-pr \
-buildmode=pie
%check
go test -C common -v
go test -C group-review -v
go test -C obs-staging-bot -v
go test -C obs-status-service -v
go test -C workflow-direct -v
# TODO build fails
#go test -C workflow-pr -v
#go build \
# -C workflow-direct \
# -mod=vendor \
# -buildmode=pie
#go build \
# -C workflow-pr \
# -mod=vendor \
# -buildmode=pie
%install
install -D -m0755 devel-importer/devel-importer %{buildroot}%{_bindir}/devel-importer
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
install -D -m0755 gitea_status_proxy/gitea_status_proxy %{buildroot}%{_bindir}/gitea_status_proxy
install -D -m0755 devel-importer/devel-importer %{buildroot}%{_bindir}/devel-importer
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
install -D -m0755 obs-forward-bot/obs-forward-bot %{buildroot}%{_bindir}/obs-forward-bot
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
install -D -m0755 utils/gitmodules-automerge/gitmodules-automerge %{buildroot}%{_bindir}/gitmodules-automerge
#install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
#install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
%pre gitea-events-rabbitmq-publisher
%pre -n gitea-events-rabbitmq-publisher
%service_add_pre gitea-events-rabbitmq-publisher.service
%post gitea-events-rabbitmq-publisher
%post -n gitea-events-rabbitmq-publisher
%service_add_post gitea-events-rabbitmq-publisher.service
%preun gitea-events-rabbitmq-publisher
%preun -n gitea-events-rabbitmq-publisher
%service_del_preun gitea-events-rabbitmq-publisher.service
%postun gitea-events-rabbitmq-publisher
%postun -n gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service
%pre obs-staging-bot
%service_add_pre obs-staging-bot.service
%post obs-staging-bot
%service_add_post obs-staging-bot.service
%preun obs-staging-bot
%service_del_preun obs-staging-bot.service
%postun obs-staging-bot
%service_del_postun obs-staging-bot.service
%pre obs-status-service
%service_add_pre obs-status-service.service
%post obs-status-service
%service_add_post obs-status-service.service
%preun obs-status-service
%service_del_preun obs-status-service.service
%postun obs-status-service
%service_del_postun obs-status-service.service
%files devel-importer
%license COPYING
%doc devel-importer/README.md
%{_bindir}/devel-importer
%files doc
%license COPYING
%doc doc/README.md
%doc doc/workflows.md
%files gitea-events-rabbitmq-publisher
%files -n gitea-events-rabbitmq-publisher
%license COPYING
%doc gitea-events-rabbitmq-publisher/README.md
%{_bindir}/gitea-events-rabbitmq-publisher
%{_unitdir}/gitea-events-rabbitmq-publisher.service
%files gitea-status-proxy
%files -n doc
%license COPYING
%{_bindir}/gitea_status_proxy
%doc doc/README.md
%doc doc/workflows.md
%files group-review
%files -n devel-importer
%license COPYING
%doc devel-importer/README.md
%{_bindir}/devel-importer
%files -n group-review
%license COPYING
%doc group-review/README.md
%{_bindir}/group-review
%files obs-forward-bot
%license COPYING
%{_bindir}/obs-forward-bot
%files obs-staging-bot
%files -n obs-staging-bot
%license COPYING
%doc obs-staging-bot/README.md
%{_bindir}/obs-staging-bot
%{_unitdir}/obs-staging-bot.service
%files obs-status-service
%files -n obs-status-service
%license COPYING
%doc obs-status-service/README.md
%{_bindir}/obs-status-service
%{_unitdir}/obs-status-service.service
%files utils
%license COPYING
%{_bindir}/hujson
%{_bindir}/gitmodules-automerge
%files workflow-direct
%files -n workflow-direct
%license COPYING
%doc workflow-direct/README.md
%{_bindir}/workflow-direct
#%{_bindir}/workflow-direct
%files workflow-pr
%files -n workflow-pr
%license COPYING
%doc workflow-pr/README.md
%{_bindir}/workflow-pr
#%{_bindir}/workflow-pr


@@ -11,7 +11,7 @@ import (
"strings"
)
const PrPattern = "PR: %s/%s!%d"
const PrPattern = "PR: %s/%s#%d"
type BasicPR struct {
Org, Repo string
@@ -36,14 +36,10 @@ func parsePrLine(line string) (BasicPR, error) {
return ret, errors.New("missing / separator")
}
repo := strings.SplitN(org[1], "!", 2)
repo := strings.SplitN(org[1], "#", 2)
ret.Repo = repo[0]
if len(repo) != 2 {
repo = strings.SplitN(org[1], "#", 2)
ret.Repo = repo[0]
}
if len(repo) != 2 {
return ret, errors.New("Missing ! or # separator")
return ret, errors.New("Missing # separator")
}
// Gitea requires that each org and repo be [A-Za-z0-9_-]+
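
The separator in PR back-references changes here from `!` to `#`, so a description line now reads "PR: org/repo#123". A standalone sketch of producing and parsing such a line (hypothetical helper names; only the format string matches the code above):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

const PrPattern = "PR: %s/%s#%d"

// parsePrRef splits "org/repo#num" into its parts; a simplified stand-in
// for the parsePrLine shown above.
func parsePrRef(ref string) (org, repo string, num int64, err error) {
	o := strings.SplitN(ref, "/", 2)
	if len(o) != 2 {
		return "", "", 0, fmt.Errorf("missing / separator in %q", ref)
	}
	r := strings.SplitN(o[1], "#", 2)
	if len(r) != 2 {
		return "", "", 0, fmt.Errorf("missing # separator in %q", ref)
	}
	num, err = strconv.ParseInt(r[1], 10, 64)
	return o[0], r[0], num, err
}

func main() {
	line := fmt.Sprintf(PrPattern, "test", "foo", 4) // "PR: test/foo#4"
	org, repo, num, err := parsePrRef(strings.TrimPrefix(line, "PR: "))
	fmt.Println(org, repo, num, err) // test foo 4 <nil>
}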


@@ -34,7 +34,7 @@ func TestAssociatedPRScanner(t *testing.T) {
},
{
"Multiple PRs",
"Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo!5\n",
"Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo#5\n",
[]common.BasicPR{
{Org: "test", Repo: "foo", Num: 4},
{Org: "test", Repo: "goo", Num: 5},
@@ -107,7 +107,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
[]common.BasicPR{
{Org: "a", Repo: "b", Num: 100},
},
"something\n\nPR: a/b!100",
"something\n\nPR: a/b#100",
},
{
"Append multiple PR to end of description",
@@ -119,7 +119,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
{Org: "b", Repo: "b", Num: 100},
{Org: "c", Repo: "b", Num: 100},
},
"something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
},
{
"Append multiple sorted PR to end of description and remove dups",
@@ -133,7 +133,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
{Org: "a1", Repo: "c", Num: 101},
{Org: "a1", Repo: "b", Num: 100},
},
"something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
},
}


@@ -25,7 +25,6 @@ import (
"io"
"log"
"os"
"slices"
"strings"
"github.com/tailscale/hujson"
@@ -36,9 +35,6 @@ import (
const (
ProjectConfigFile = "workflow.config"
StagingConfigFile = "staging.config"
Permission_ForceMerge = "force-merge"
Permission_Group = "release-engineering"
)
type ConfigFile struct {
@@ -47,7 +43,6 @@ type ConfigFile struct {
type ReviewGroup struct {
Name string
Silent bool // will not request reviews from group members
Reviewers []string
}
@@ -56,23 +51,15 @@ type QAConfig struct {
Origin string
}
type Permissions struct {
Permission string
Members []string
}
type AutogitConfig struct {
Workflows []string // [pr, direct, test]
Organization string
GitProjectName string // Organization/GitProjectName.git is PrjGit
Branch string // branch name of PkgGit that aligns with PrjGit submodules
Reviewers []string // only used by `pr` workflow
Permissions []*Permissions // only used by `pr` workflow
ReviewGroups []*ReviewGroup
GitProjectName string // Organization/GitProjectName.git is PrjGit
Branch string // branch name of PkgGit that aligns with PrjGit submodules
Reviewers []string // only used by `pr` workflow
ReviewGroups []ReviewGroup
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume something else creates the ProjectGit PR
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
}
@@ -196,27 +183,6 @@ func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *Autogit
return nil
}
func (config *AutogitConfig) HasPermission(user, permission string) bool {
if config == nil {
return false
}
for _, p := range config.Permissions {
if p.Permission == permission {
if slices.Contains(p.Members, user) {
return true
}
for _, m := range p.Members {
if members, err := config.GetReviewGroupMembers(m); err == nil && slices.Contains(members, user) {
return true
}
}
}
}
return false
}
func (config *AutogitConfig) GetReviewGroupMembers(reviewer string) ([]string, error) {
for _, g := range config.ReviewGroups {
if g.Name == reviewer {
@@ -227,19 +193,10 @@ func (config *AutogitConfig) GetReviewGroupMembers(reviewer string) ([]string, e
return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
}
func (config *AutogitConfig) GetReviewGroup(reviewer string) (*ReviewGroup, error) {
for _, g := range config.ReviewGroups {
if g.Name == reviewer {
return g, nil
}
}
return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
}
func (config *AutogitConfig) GetPrjGit() (string, string, string) {
org := config.Organization
repo := DefaultGitPrj
branch := ""
branch := "master"
a := strings.Split(config.GitProjectName, "/")
if len(a[0]) > 0 {
@@ -263,9 +220,6 @@ func (config *AutogitConfig) GetPrjGit() (string, string, string) {
}
}
if len(branch) == 0 {
panic("branch for project is undefined. Should not happend." + org + "/" + repo)
}
return org, repo, branch
}
@@ -285,9 +239,6 @@ type StagingConfig struct {
func ParseStagingConfig(data []byte) (*StagingConfig, error) {
var staging StagingConfig
if len(data) == 0 {
return nil, errors.New("non-existent config file.")
}
data, err := hujson.Standardize(data)
if err != nil {
return nil, err


@@ -109,7 +109,6 @@ func TestConfigWorkflowParser(t *testing.T) {
}
}
// FIXME: should test ReadWorkflowConfig as it will always set prjgit completely
func TestProjectGitParser(t *testing.T) {
tests := []struct {
name string
@@ -120,21 +119,20 @@ func TestProjectGitParser(t *testing.T) {
}{
{
name: "repo only",
prjgit: "repo.git#master",
prjgit: "repo.git",
org: "org",
branch: "br",
res: [3]string{"org", "repo.git", "master"},
},
{
name: "default",
org: "org",
prjgit: "org/_ObsPrj#master",
res: [3]string{"org", common.DefaultGitPrj, "master"},
name: "default",
org: "org",
res: [3]string{"org", common.DefaultGitPrj, "master"},
},
{
name: "repo with branch",
org: "org2",
prjgit: "org2/repo.git#somebranch",
prjgit: "repo.git#somebranch",
res: [3]string{"org2", "repo.git", "somebranch"},
},
{
@@ -151,25 +149,25 @@ func TestProjectGitParser(t *testing.T) {
{
name: "repo org and empty branch",
org: "org3",
prjgit: "oorg/foo.bar#master",
prjgit: "oorg/foo.bar#",
res: [3]string{"oorg", "foo.bar", "master"},
},
{
name: "only branch defined",
org: "org3",
prjgit: "org3/_ObsPrj#mybranch",
prjgit: "#mybranch",
res: [3]string{"org3", "_ObsPrj", "mybranch"},
},
{
name: "only org and branch defined",
org: "org3",
prjgit: "org1/_ObsPrj#mybranch",
prjgit: "org1/#mybranch",
res: [3]string{"org1", "_ObsPrj", "mybranch"},
},
{
name: "empty org and repo",
org: "org3",
prjgit: "org3/repo#master",
prjgit: "/repo#",
res: [3]string{"org3", "repo", "master"},
},
}
@@ -190,67 +188,3 @@ func TestProjectGitParser(t *testing.T) {
})
}
}
func TestConfigPermissions(t *testing.T) {
tests := []struct {
name string
permission string
user string
config *common.AutogitConfig
result bool
}{
{
name: "NoPermissions",
permission: common.Permission_ForceMerge,
},
{
name: "NoPermissions",
permission: common.Permission_Group,
},
{
name: "Regular permission ForcePush",
permission: common.Permission_ForceMerge,
result: true,
user: "user",
config: &common.AutogitConfig{
Permissions: []*common.Permissions{
&common.Permissions{
Permission: common.Permission_ForceMerge,
Members: []string{"user"},
},
},
},
},
{
name: "User is part of a group",
permission: common.Permission_ForceMerge,
result: true,
user: "user",
config: &common.AutogitConfig{
Permissions: []*common.Permissions{
&common.Permissions{
Permission: common.Permission_ForceMerge,
Members: []string{"group"},
},
},
ReviewGroups: []*common.ReviewGroup{
&common.ReviewGroup{
Name: "group",
Reviewers: []string{"some", "members", "including", "user"},
},
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
if r := test.config.HasPermission(test.user, test.permission); r != test.result {
t.Error("Expecting", test.result, "but got opposite")
}
if r := test.config.HasPermission(test.user+test.user, test.permission); r {
t.Error("Expecting false for fake user, but got opposite")
}
})
}
}
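
The defaulting behaviour exercised by TestProjectGitParser above can be summarised in a small sketch: the prjgit string is "[org/]repo[#branch]", and any missing part falls back to the configured organization, DefaultGitPrj ("_ObsPrj") and, after this change, the "master" branch instead of a panic. This is a hypothetical stand-in, not the package's GetPrjGit implementation:

package main

import (
	"fmt"
	"strings"
)

const DefaultGitPrj = "_ObsPrj"

// resolvePrjGit fills in missing parts of "[org/]repo[#branch]".
func resolvePrjGit(defaultOrg, prjgit string) (org, repo, branch string) {
	org, repo, branch = defaultOrg, DefaultGitPrj, "master"

	spec := prjgit
	if i := strings.LastIndex(spec, "#"); i >= 0 {
		if b := spec[i+1:]; len(b) > 0 {
			branch = b
		}
		spec = spec[:i]
	}
	if i := strings.Index(spec, "/"); i >= 0 {
		if o := spec[:i]; len(o) > 0 {
			org = o
		}
		spec = spec[i+1:]
	}
	if len(spec) > 0 {
		repo = spec
	}
	return org, repo, branch
}

func main() {
	fmt.Println(resolvePrjGit("org", "repo.git"))             // org repo.git master
	fmt.Println(resolvePrjGit("org2", "repo.git#somebranch")) // org2 repo.git somebranch
	fmt.Println(resolvePrjGit("org3", "org1/#mybranch"))      // org1 _ObsPrj mybranch
	fmt.Println(resolvePrjGit("org3", "/repo#"))              // org3 repo master
}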


@@ -1731,246 +1731,3 @@ const requestedReviewJSON = `{
"commit_id": "",
"review": null
}`
const requestStatusJSON=`{
"commit": {
"id": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"message": "Update nodejs-common.changes\n",
"url": "https://src.opensuse.org/autogits/nodejs-common/commit/e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"author": {
"name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"username": "adamm"
},
"committer": {
"name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"username": "adamm"
},
"verification": null,
"timestamp": "2025-09-16T12:41:02+02:00",
"added": [],
"removed": [],
"modified": [
"nodejs-common.changes"
]
},
"context": "test",
"created_at": "2025-09-16T10:50:32Z",
"description": "",
"id": 21663,
"repository": {
"id": 90520,
"owner": {
"id": 983,
"login": "autogits",
"login_name": "",
"source_id": 0,
"full_name": "",
"email": "",
"avatar_url": "https://src.opensuse.org/avatars/80a61ef3a14c3c22f0b8b1885d1a75d4",
"html_url": "https://src.opensuse.org/autogits",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2024-06-20T09:46:37+02:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 0,
"following_count": 0,
"starred_repos_count": 0,
"username": "autogits"
},
"name": "nodejs-common",
"full_name": "autogits/nodejs-common",
"description": "",
"empty": false,
"private": false,
"fork": true,
"template": false,
"parent": {
"id": 62649,
"owner": {
"id": 64,
"login": "pool",
"login_name": "",
"source_id": 0,
"full_name": "",
"email": "",
"avatar_url": "https://src.opensuse.org/avatars/b10a8c0bede9eb4ea771b04db3149f28",
"html_url": "https://src.opensuse.org/pool",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2023-03-01T14:41:17+01:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 2,
"following_count": 0,
"starred_repos_count": 0,
"username": "pool"
},
"name": "nodejs-common",
"full_name": "pool/nodejs-common",
"description": "",
"empty": false,
"private": false,
"fork": false,
"template": false,
"mirror": false,
"size": 134,
"language": "",
"languages_url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common/languages",
"html_url": "https://src.opensuse.org/pool/nodejs-common",
"url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common",
"link": "",
"ssh_url": "gitea@src.opensuse.org:pool/nodejs-common.git",
"clone_url": "https://src.opensuse.org/pool/nodejs-common.git",
"original_url": "",
"website": "",
"stars_count": 0,
"forks_count": 3,
"watchers_count": 12,
"open_issues_count": 0,
"open_pr_counter": 0,
"release_counter": 0,
"default_branch": "factory",
"archived": false,
"created_at": "2024-06-17T17:08:45+02:00",
"updated_at": "2025-08-21T21:58:31+02:00",
"archived_at": "1970-01-01T01:00:00+01:00",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": true,
"internal_tracker": {
"enable_time_tracker": false,
"allow_only_contributors_to_track_time": true,
"enable_issue_dependencies": true
},
"has_wiki": false,
"has_pull_requests": true,
"has_projects": false,
"projects_mode": "all",
"has_releases": false,
"has_packages": false,
"has_actions": false,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"allow_fast_forward_only_merge": true,
"allow_rebase_update": true,
"allow_manual_merge": true,
"autodetect_manual_merge": true,
"default_delete_branch_after_merge": false,
"default_merge_style": "merge",
"default_allow_maintainer_edit": false,
"avatar_url": "",
"internal": false,
"mirror_interval": "",
"object_format_name": "sha256",
"mirror_updated": "0001-01-01T00:00:00Z",
"topics": [],
"licenses": []
},
"mirror": false,
"size": 143,
"language": "",
"languages_url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common/languages",
"html_url": "https://src.opensuse.org/autogits/nodejs-common",
"url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common",
"link": "",
"ssh_url": "gitea@src.opensuse.org:autogits/nodejs-common.git",
"clone_url": "https://src.opensuse.org/autogits/nodejs-common.git",
"original_url": "",
"website": "",
"stars_count": 0,
"forks_count": 1,
"watchers_count": 4,
"open_issues_count": 0,
"open_pr_counter": 1,
"release_counter": 0,
"default_branch": "factory",
"archived": false,
"created_at": "2024-07-01T13:29:03+02:00",
"updated_at": "2025-09-16T12:41:03+02:00",
"archived_at": "1970-01-01T01:00:00+01:00",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": false,
"has_wiki": false,
"has_pull_requests": true,
"has_projects": false,
"projects_mode": "all",
"has_releases": false,
"has_packages": false,
"has_actions": false,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"allow_fast_forward_only_merge": true,
"allow_rebase_update": true,
"allow_manual_merge": true,
"autodetect_manual_merge": true,
"default_delete_branch_after_merge": false,
"default_merge_style": "merge",
"default_allow_maintainer_edit": false,
"avatar_url": "",
"internal": false,
"mirror_interval": "",
"object_format_name": "sha256",
"mirror_updated": "0001-01-01T00:00:00Z",
"topics": [],
"licenses": [
"MIT"
]
},
"sender": {
"id": 129,
"login": "adamm",
"login_name": "",
"source_id": 0,
"full_name": "Adam Majer",
"email": "adamm@noreply.src.opensuse.org",
"avatar_url": "https://src.opensuse.org/avatars/3e8917bfbf04293f7c20c28cacd83dae2ba9b78a6c6a9a1bedf14c683d8a3763",
"html_url": "https://src.opensuse.org/adamm",
"language": "",
"is_admin": false,
"last_login": "0001-01-01T00:00:00Z",
"created": "2023-07-21T16:43:48+02:00",
"restricted": false,
"active": false,
"prohibit_login": false,
"location": "",
"website": "",
"description": "",
"visibility": "public",
"followers_count": 1,
"following_count": 0,
"starred_repos_count": 0,
"username": "adamm"
},
"sha": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
"state": "pending",
"target_url": "https://src.opensuse.org/",
"updated_at": "2025-09-16T10:50:32Z"
}`


@@ -1,296 +0,0 @@
package common
import (
"errors"
"fmt"
"io"
)
const (
GitStatus_Untracked = 0
GitStatus_Modified = 1
GitStatus_Ignored = 2
GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
GitStatus_Renamed = 4 // orig name in States[0]
)
type GitStatusData struct {
Path string
Status int
States [3]string
/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}
func parseGit_HexString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 32)
for {
c, err := data.ReadByte()
if err != nil {
return "", err
}
switch {
case c == 0 || c == ' ':
return string(str), nil
case c >= 'a' && c <= 'f':
case c >= 'A' && c <= 'F':
case c >= '0' && c <= '9':
default:
return "", errors.New("Invalid character in hex string:" + string(c))
}
str = append(str, c)
}
}
func parseGit_String(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}
func parseGit_StringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 {
return string(str), nil
}
str = append(str, c)
}
}
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
for skipSpaceLen > 0 {
c, err := data.ReadByte()
if err != nil {
return err
}
if c == ' ' {
skipSpaceLen--
}
}
return nil
}
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
ret := GitStatusData{}
statusType, err := data.ReadByte()
if err != nil {
return nil, nil
}
switch statusType {
case '1':
var err error
if err = skipGitStatusEntry(data, 8); err != nil {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
case '2':
var err error
if err = skipGitStatusEntry(data, 9); err != nil {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
case '?':
var err error
if err = skipGitStatusEntry(data, 1); err != nil {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
case '!':
var err error
if err = skipGitStatusEntry(data, 1); err != nil {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 2); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGit_String(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}
if ret.States[0], err = parseGit_HexString(data); err != nil {
return nil, err
}
if ret.States[1], err = parseGit_HexString(data); err != nil {
return nil, err
}
if ret.States[2], err = parseGit_HexString(data); err != nil {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGit_StringWithSpace(data)
if err != nil {
return nil, err
}
default:
return nil, errors.New("Invalid status type" + string(statusType))
}
return &ret, nil
}
func parseGitStatusData(data io.ByteReader) (Data, error) {
ret := make([]GitStatusData, 0, 10)
for {
data, err := parseSingleStatusEntry(data)
if err != nil {
return nil, err
} else if data == nil {
break
}
ret = append(ret, *data)
}
return ret, nil
}
type Data interface{}
type CommitStatus int
const (
Add CommitStatus = iota
Rm
Copy
Modify
Rename
TypeChange
Unmerged
Unknown
)
type GitDiffRawData struct {
SrcMode, DstMode string
SrcCommit, DstCommit string
Status CommitStatus
Src, Dst string
}
func parseGit_DiffIndexStatus(data io.ByteReader, d *GitDiffRawData) error {
b, err := data.ReadByte()
if err != nil {
return err
}
switch b {
case 'A':
d.Status = Add
case 'C':
d.Status = Copy
case 'D':
d.Status = Rm
case 'M':
d.Status = Modify
case 'R':
d.Status = Rename
case 'T':
d.Status = TypeChange
case 'U':
d.Status = Unmerged
case 'X':
return fmt.Errorf("Unexpected unknown change type. This is a git bug")
}
_, err = parseGit_StringWithSpace(data)
if err != nil {
return err
}
return nil
}
func parseSingleGitDiffIndexRawData(data io.ByteReader) (*GitDiffRawData, error) {
var ret GitDiffRawData
b, err := data.ReadByte()
if err != nil {
return nil, err
}
if b != ':' {
return nil, fmt.Errorf("Expected ':' but got '%s'", string(b))
}
if ret.SrcMode, err = parseGit_String(data); err != nil {
return nil, err
}
if ret.DstMode, err = parseGit_String(data); err != nil {
return nil, err
}
if ret.Src, err = parseGit_String(data); err != nil {
return nil, err
}
if ret.Dst, err = parseGit_String(data); err != nil {
return nil, err
}
if err = parseGit_DiffIndexStatus(data, &ret); err != nil {
return nil, err
}
ret.Dst = ret.Src
switch ret.Status {
case Copy, Rename:
if ret.Src, err = parseGit_StringWithSpace(data); err != nil {
return nil, err
}
}
return &ret, nil
}
func parseGitDiffIndexRawData(data io.ByteReader) (Data, error) {
ret := make([]GitDiffRawData, 0, 10)
for {
data, err := parseSingleGitDiffIndexRawData(data)
if err != nil {
return nil, err
} else if data == nil {
break
}
ret = append(ret, *data)
}
return ret, nil
}


@@ -19,7 +19,9 @@ package common
*/
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"os"
@@ -38,15 +40,6 @@ type GitSubmoduleLister interface {
GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool)
}
type GitDirectoryLister interface {
GitDirectoryList(gitPath, commitId string) (dirlist map[string]string, err error)
}
type GitSubmoduleFileConflictResolver interface {
GitResolveConflicts(cwd, MergeBase, Head, MergeHead string) error
GitResolveSubmoduleFileConflict(s GitStatusData, cwd, mergeBase, head, mergeHead string) error
}
type GitStatusLister interface {
GitStatus(cwd string) ([]GitStatusData, error)
}
@@ -68,17 +61,14 @@ type Git interface {
io.Closer
GitSubmoduleLister
GitDirectoryLister
GitStatusLister
GitExecWithOutputOrPanic(cwd string, params ...string) string
GitExecOrPanic(cwd string, params ...string)
GitExec(cwd string, params ...string) error
GitExecWithOutput(cwd string, params ...string) (string, error)
GitExecQuietOrPanic(cwd string, params ...string)
GitDiffLister
GitSubmoduleFileConflictResolver
}
type GitHandlerImpl struct {
@@ -86,8 +76,7 @@ type GitHandlerImpl struct {
GitCommiter string
GitEmail string
lock *sync.Mutex
quiet bool
lock *sync.Mutex
}
func (s *GitHandlerImpl) GetPath() string {
@@ -222,7 +211,7 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
return "", fmt.Errorf("Cannot parse remote URL: %w", err)
}
remoteBranch := "HEAD"
if len(branch) == 0 && remoteUrlComp != nil && remoteUrlComp.Commit != "HEAD" {
if len(branch) == 0 && remoteUrlComp != nil {
branch = remoteUrlComp.Commit
remoteBranch = branch
} else if len(branch) > 0 {
@@ -251,51 +240,46 @@ func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error
// check if we have submodule to deinit
if list, _ := e.GitSubmoduleList(repo, "HEAD"); len(list) > 0 {
e.GitExecQuietOrPanic(repo, "submodule", "deinit", "--all", "--force")
e.GitExecOrPanic(repo, "submodule", "deinit", "--all", "--force")
}
e.GitExecOrPanic(repo, "fetch", "--prune", remoteName, remoteBranch)
}
/*
refsBytes, err := os.ReadFile(path.Join(e.GitPath, repo, ".git/refs/remotes", remoteName, "HEAD"))
if err != nil {
LogError("Cannot read HEAD of remote", remoteName)
return remoteName, fmt.Errorf("Cannot read HEAD of remote %s", remoteName)
}
refs := string(refsBytes)
if refs[0:5] != "ref: " {
LogError("Unexpected format of remote HEAD ref:", refs)
return remoteName, fmt.Errorf("Unexpected format of remote HEAD ref: %s", refs)
}
refsBytes, err := os.ReadFile(path.Join(e.GitPath, repo, ".git/refs/remotes", remoteName, "HEAD"))
if err != nil {
LogError("Cannot read HEAD of remote", remoteName)
return remoteName, fmt.Errorf("Cannot read HEAD of remote %s", remoteName)
}
refs := string(refsBytes)
if refs[0:5] != "ref: " {
LogError("Unexpected format of remote HEAD ref:", refs)
return remoteName, fmt.Errorf("Unexpected format of remote HEAD ref: %s", refs)
}
if len(branch) == 0 || branch == "HEAD" {
remoteRef = strings.TrimSpace(refs[5:])
branch = remoteRef[strings.LastIndex(remoteRef, "/")+1:]
LogDebug("remoteRef", remoteRef)
LogDebug("branch", branch)
}
if len(branch) == 0 || branch == "HEAD" {
remoteRef = strings.TrimSpace(refs[5:])
branch = remoteRef[strings.LastIndex(remoteRef, "/")+1:]
LogDebug("remoteRef", remoteRef)
LogDebug("branch", branch)
}
*/
args := []string{"fetch", "--prune", remoteName, branch}
if strings.TrimSpace(e.GitExecWithOutputOrPanic(repo, "rev-parse", "--is-shallow-repository")) == "true" {
args = slices.Insert(args, 1, "--unshallow")
}
e.GitExecOrPanic(repo, args...)
return remoteName, e.GitExec(repo, "checkout", "-f", "--track", "-B", branch, remoteRef)
return remoteName, e.GitExec(repo, "checkout", "--track", "-B", branch, remoteRef)
}
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--heads", "--hash", branchName)
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--hash", "--verify", "refs/heads/"+branchName)
if err != nil {
return "", fmt.Errorf("Can't find default branch: %s", branchName)
}
id = strings.TrimSpace(SplitLines(id)[0])
if len(id) < 10 {
return "", fmt.Errorf("Can't find branch: %s", branchName)
}
return id, nil
return strings.TrimSpace(id), nil
}
func (e *GitHandlerImpl) GitRemoteHead(gitDir, remote, branchName string) (string, error) {
@@ -357,20 +341,12 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
cmd.Env = []string{
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
"GIT_CONFIG_GLOBAL=/dev/null",
"GIT_AUTHOR_NAME=" + e.GitCommiter,
"GIT_COMMITTER_NAME=" + e.GitCommiter,
"EMAIL=not@exist@src.opensuse.org",
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_LFS_SKIP_PUSH=1",
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
}
if len(e.GitEmail) > 0 {
cmd.Env = append(cmd.Env, "EMAIL="+e.GitEmail)
}
if len(e.GitCommiter) > 0 {
cmd.Env = append(cmd.Env,
"GIT_AUTHOR_NAME="+e.GitCommiter,
"GIT_COMMITTER_NAME="+e.GitCommiter,
)
}
if len(ExtraGitParams) > 0 {
cmd.Env = append(cmd.Env, ExtraGitParams...)
}
@@ -379,9 +355,7 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
LogDebug("git execute @", cwd, ":", cmd.Args)
out, err := cmd.CombinedOutput()
if !e.quiet {
LogDebug(string(out))
}
LogDebug(string(out))
if err != nil {
LogError("git", cmd.Args, " error:", err)
return "", fmt.Errorf("error executing: git %#v \n%s\n err: %w", cmd.Args, out, err)
@@ -390,13 +364,6 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
return string(out), nil
}
func (e *GitHandlerImpl) GitExecQuietOrPanic(cwd string, params ...string) {
e.quiet = true
e.GitExecOrPanic(cwd, params...)
e.quiet = false
return
}
type ChanIO struct {
ch chan byte
}
@@ -794,80 +761,6 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
return
}
// return (filename) -> (hash) map for all submodules
func (e *GitHandlerImpl) GitDirectoryList(gitPath, commitId string) (directoryList map[string]string, err error) {
var done sync.Mutex
directoryList = make(map[string]string)
done.Lock()
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
LogDebug("Getting directory for:", commitId)
go func() {
defer done.Unlock()
defer close(data_out.ch)
data_out.Write([]byte(commitId))
data_out.ch <- '\x00'
var c GitCommit
c, err = parseGitCommit(data_in.ch)
if err != nil {
err = fmt.Errorf("Error parsing git commit. Err: %w", err)
return
}
trees := make(map[string]string)
trees[""] = c.Tree
for len(trees) > 0 {
for p, tree := range trees {
delete(trees, p)
data_out.Write([]byte(tree))
data_out.ch <- '\x00'
var tree GitTree
tree, err = parseGitTree(data_in.ch)
if err != nil {
err = fmt.Errorf("Error parsing git tree: %w", err)
return
}
for _, te := range tree.items {
if te.isTree() {
directoryList[p+te.name] = te.hash
}
}
}
}
}()
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
cmd.Env = []string{
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
"GIT_LFS_SKIP_SMUDGE=1",
"GIT_CONFIG_GLOBAL=/dev/null",
}
cmd.Dir = filepath.Join(e.GitPath, gitPath)
cmd.Stdout = &data_in
cmd.Stdin = &data_out
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
LogError(string(data))
return len(data), nil
})
LogDebug("command run:", cmd.Args)
if e := cmd.Run(); e != nil {
LogError(e)
close(data_in.ch)
close(data_out.ch)
return directoryList, e
}
done.Lock()
return directoryList, err
}
// return (filename) -> (hash) map for all submodules
func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error) {
var done sync.Mutex
@@ -1013,10 +906,193 @@ func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string)
return subCommitId, len(subCommitId) > 0
}
func (e *GitHandlerImpl) GitExecWithDataParse(cwd string, dataprocessor func(io.ByteReader) (Data, error), gitcmd string, args ...string) (Data, error) {
LogDebug("getting", gitcmd)
args = append([]string{gitcmd}, args...)
cmd := exec.Command("/usr/bin/git", args...)
const (
GitStatus_Untracked = 0
GitStatus_Modified = 1
GitStatus_Ignored = 2
GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
GitStatus_Renamed = 4 // orig name in States[0]
)
type GitStatusData struct {
Path string
Status int
States [3]string
/*
<sub> A 4 character field describing the submodule state.
"N..." when the entry is not a submodule.
"S<c><m><u>" when the entry is a submodule.
<c> is "C" if the commit changed; otherwise ".".
<m> is "M" if it has tracked changes; otherwise ".".
<u> is "U" if there are untracked changes; otherwise ".".
*/
SubmoduleChanges string
}
func parseGitStatusHexString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 32)
for {
c, err := data.ReadByte()
if err != nil {
return "", err
}
switch {
case c == 0 || c == ' ':
return string(str), nil
case c >= 'a' && c <= 'f':
case c >= 'A' && c <= 'F':
case c >= '0' && c <= '9':
default:
return "", errors.New("Invalid character in hex string:" + string(c))
}
str = append(str, c)
}
}
func parseGitStatusString(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 || c == ' ' {
return string(str), nil
}
str = append(str, c)
}
}
func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
str := make([]byte, 0, 100)
for {
c, err := data.ReadByte()
if err != nil {
return "", errors.New("Unexpected EOF. Expected NUL string term")
}
if c == 0 {
return string(str), nil
}
str = append(str, c)
}
}
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
for skipSpaceLen > 0 {
c, err := data.ReadByte()
if err != nil {
return err
}
if c == ' ' {
skipSpaceLen--
}
}
return nil
}
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
ret := GitStatusData{}
statusType, err := data.ReadByte()
if err != nil {
return nil, nil
}
switch statusType {
case '1':
var err error
if err = skipGitStatusEntry(data, 8); err != nil {
return nil, err
}
ret.Status = GitStatus_Modified
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case '2':
var err error
if err = skipGitStatusEntry(data, 9); err != nil {
return nil, err
}
ret.Status = GitStatus_Renamed
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
ret.States[0], err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case '?':
var err error
if err = skipGitStatusEntry(data, 1); err != nil {
return nil, err
}
ret.Status = GitStatus_Untracked
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case '!':
var err error
if err = skipGitStatusEntry(data, 1); err != nil {
return nil, err
}
ret.Status = GitStatus_Ignored
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
case 'u':
var err error
if err = skipGitStatusEntry(data, 2); err != nil {
return nil, err
}
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
return nil, err
}
if err = skipGitStatusEntry(data, 4); err != nil {
return nil, err
}
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
if ret.States[1], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
if ret.States[2], err = parseGitStatusHexString(data); err != nil {
return nil, err
}
ret.Status = GitStatus_Unmerged
ret.Path, err = parseGitStatusStringWithSpace(data)
if err != nil {
return nil, err
}
default:
return nil, errors.New("Invalid status type" + string(statusType))
}
return &ret, nil
}
func parseGitStatusData(data io.ByteReader) ([]GitStatusData, error) {
ret := make([]GitStatusData, 0, 10)
for {
data, err := parseSingleStatusEntry(data)
if err != nil {
return nil, err
} else if data == nil {
break
}
ret = append(ret, *data)
}
return ret, nil
}
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
LogDebug("getting git-status()")
cmd := exec.Command("/usr/bin/git", "status", "--porcelain=2", "-z")
cmd.Env = []string{
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
"GIT_LFS_SKIP_SMUDGE=1",
@@ -1033,12 +1109,7 @@ func (e *GitHandlerImpl) GitExecWithDataParse(cwd string, dataprocessor func(io.
LogError("Error running command", cmd.Args, err)
}
return dataprocessor(bytes.NewReader(out))
}
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
data, err := e.GitExecWithDataParse(cwd, parseGitStatusData, "status", "--porcelain=2", "-z")
return data.([]GitStatusData), err
return parseGitStatusData(bufio.NewReader(bytes.NewReader(out)))
}
func (e *GitHandlerImpl) GitDiff(cwd, base, head string) (string, error) {
@@ -1063,122 +1134,3 @@ func (e *GitHandlerImpl) GitDiff(cwd, base, head string) (string, error) {
return string(out), nil
}
func (e *GitHandlerImpl) GitDiffIndex(cwd, commit string) ([]GitDiffRawData, error) {
data, err := e.GitExecWithDataParse("diff-index", parseGitDiffIndexRawData, cwd, "diff-index", "-z", "--raw", "--full-index", "--submodule=short", "HEAD")
return data.([]GitDiffRawData), err
}
func (git *GitHandlerImpl) GitResolveConflicts(cwd, mergeBase, head, mergeHead string) error {
status, err := git.GitStatus(cwd)
if err != nil {
return fmt.Errorf("Status failed: %w", err)
}
// we can only resolve conflicts with .gitmodules
for _, s := range status {
if s.Status == GitStatus_Unmerged && s.Path == ".gitmodules" {
if err := git.GitResolveSubmoduleFileConflict(s, cwd, mergeBase, head, mergeHead); err != nil {
return err
}
} else if s.Status == GitStatus_Unmerged {
return fmt.Errorf("Cannot automatically resolve conflict: %s", s.Path)
}
}
return git.GitExec(cwd, "-c", "core.editor=true", "merge", "--continue")
}
func (git *GitHandlerImpl) GitResolveSubmoduleFileConflict(s GitStatusData, cwd, mergeBase, head, mergeHead string) error {
submodules1, err := git.GitSubmoduleList(cwd, mergeBase)
if err != nil {
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
}
/*
submodules2, err := git.GitSubmoduleList(cwd, head)
if err != nil {
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
}
*/
submodules3, err := git.GitSubmoduleList(cwd, mergeHead)
if err != nil {
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
}
// find modified submodules in the mergeHead
modifiedSubmodules := make([]string, 0, 10)
removedSubmodules := make([]string, 0, 10)
addedSubmodules := make([]string, 0, 10)
for submodulePath, oldHash := range submodules1 {
if newHash, found := submodules3[submodulePath]; found && newHash != oldHash {
modifiedSubmodules = append(modifiedSubmodules, submodulePath)
} else if !found {
removedSubmodules = append(removedSubmodules, submodulePath)
}
}
for submodulePath, _ := range submodules3 {
if _, found := submodules1[submodulePath]; !found {
addedSubmodules = append(addedSubmodules, submodulePath)
}
}
// We need to adjust the `submodules` list by the pending changes to the index
s1, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[0])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
s2, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[1])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
s3, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[2])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
_, err = ParseSubmodulesFile(strings.NewReader(s1))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
overrideSet := make([]Submodule, 0, len(addedSubmodules)+len(modifiedSubmodules))
for i := range subs3 {
if slices.Contains(addedSubmodules, subs3[i].Path) || slices.Contains(modifiedSubmodules, subs3[i].Path) {
overrideSet = append(overrideSet, subs3[i])
}
}
// merge from subs1 (merge-base), subs2 (changes in base since merge-base HEAD), subs3 (merge_source MERGE_HEAD)
// this will update submodules
SubmoduleCompare := func(a, b Submodule) int { return strings.Compare(a.Path, b.Path) }
CompactCompare := func(a, b Submodule) bool { return a.Path == b.Path }
// remove submodules that are removed in the PR
subs2 = slices.DeleteFunc(subs2, func(a Submodule) bool { return slices.Contains(removedSubmodules, a.Path) })
mergedSubs := slices.Concat(overrideSet, subs2)
slices.SortStableFunc(mergedSubs, SubmoduleCompare)
filteredSubs := slices.CompactFunc(mergedSubs, CompactCompare)
out, err := os.Create(path.Join(git.GetPath(), cwd, ".gitmodules"))
if err != nil {
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
}
if err = WriteSubmodules(filteredSubs, out); err != nil {
return fmt.Errorf("Can't write .gitmodules: %w", err)
}
if out.Close(); err != nil {
return fmt.Errorf("Can't close .gitmodules: %w", err)
}
git.GitExecOrPanic(cwd, "add", ".gitmodules")
return nil
}
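
The status parsing moved into this file consumes `git status --porcelain=2 -z` output: NUL-terminated records whose first byte selects the entry kind ('1' changed, '2' renamed, 'u' unmerged, '?' untracked, '!' ignored), which is what parseSingleStatusEntry switches on. A minimal sketch that just dumps those raw records (run inside any work tree; not part of the repository):

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

func main() {
	// Same invocation GitStatus uses; -z terminates each record with NUL.
	out, err := exec.Command("git", "status", "--porcelain=2", "-z").Output()
	if err != nil {
		fmt.Println("git status failed:", err)
		return
	}
	for _, rec := range bytes.Split(out, []byte{0}) {
		if len(rec) == 0 {
			continue
		}
		// Note: rename ('2') entries are followed by one extra NUL-separated
		// field holding the original path, which shows up here as its own line.
		fmt.Printf("%c  %s\n", rec[0], rec)
	}
}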


@@ -24,7 +24,6 @@ import (
"os"
"os/exec"
"path"
"runtime/debug"
"slices"
"strings"
"testing"
@@ -94,145 +93,6 @@ func TestGitClone(t *testing.T) {
}
}
func TestSubmoduleConflictResolution(t *testing.T) {
tests := []struct {
name string
checkout, merge string
result string
merge_fail bool
}{
{
name: "adding two submodules",
checkout: "base_add_b1",
merge: "base_add_b2",
result: `[submodule "pkgA"]
path = pkgA
url = ../pkgA
[submodule "pkgB"]
path = pkgB
url = ../pkgB
[submodule "pkgB1"]
path = pkgB1
url = ../pkgB1
[submodule "pkgB2"]
path = pkgB2
url = ../pkgB2
[submodule "pkgC"]
path = pkgC
url = ../pkgC
`,
},
{
name: "remove one module and add another",
checkout: "base_rm_c",
merge: "base_add_b2",
result: `[submodule "pkgA"]
path = pkgA
url = ../pkgA
[submodule "pkgB"]
path = pkgB
url = ../pkgB
[submodule "pkgB2"]
path = pkgB2
url = ../pkgB2
`,
},
{
name: "add one and remove another",
checkout: "base_add_b2",
merge: "base_rm_c",
result: `[submodule "pkgA"]
path = pkgA
url = ../pkgA
[submodule "pkgB"]
path = pkgB
url = ../pkgB
[submodule "pkgB2"]
path = pkgB2
url = ../pkgB2
`,
},
{
name: "rm modified submodule",
checkout: "base_modify_c",
merge: "base_rm_c",
merge_fail: true,
},
{
name: "modified removed submodule",
checkout: "base_rm_c",
merge: "base_modify_c",
merge_fail: true,
},
}
d, err := os.MkdirTemp(os.TempDir(), "submoduletests")
if err != nil {
t.Fatal(err)
}
cwd, err := os.Getwd()
if err != nil {
t.Fatal(err)
}
cmd := exec.Command(cwd + "/test_repo_setup.sh")
cmd.Dir = d
_, err = cmd.Output()
if err != nil {
t.Fatal(err)
}
gh, err := AllocateGitWorkTree(d, "test", "foo@example.com")
if err != nil {
t.Fatal(err)
}
success := true
noErrorOrFail := func(t *testing.T, err error) {
if err != nil {
t.Fatal(string(debug.Stack()), err)
}
}
for _, test := range tests {
success = t.Run(test.name, func(t *testing.T) {
git, err := gh.ReadExistingPath("prjgit")
defer git.Close()
if err != nil {
t.Fatal(err)
}
noErrorOrFail(t, git.GitExec("", "reset", "--hard"))
noErrorOrFail(t, git.GitExec("", "checkout", "-B", "test", test.checkout))
// noErrorOrFail(t, git.GitExec("", "merge", test.checkout))
err = git.GitExec("", "merge", test.merge)
if err == nil {
t.Fatal("expected a conflict")
}
err = git.GitResolveConflicts("", "main", test.checkout, test.merge)
if err != nil {
if test.merge_fail {
return // success
}
t.Fatal(err)
}
if test.merge_fail {
t.Fatal("Expected fail but succeeded?")
}
data, err := os.ReadFile(git.GetPath() + "/.gitmodules")
if err != nil {
t.Fatal("Cannot read .gitmodules.", err)
}
if string(data) != test.result {
t.Error("Expected", len(test.result), test.result, "but have", len(data), string(data))
}
}) && success
}
if success {
os.RemoveAll(d)
}
}
func TestGitMsgParsing(t *testing.T) {
t.Run("tree message with size 56", func(t *testing.T) {
const hdr = "f40888ea4515fe2e8eea617a16f5f50a45f652d894de3ad181d58de3aafb8f98 tree 56\x00"
@@ -532,7 +392,6 @@ func TestCommitTreeParsing(t *testing.T) {
commitId = commitId + strings.TrimSpace(string(data))
return len(data), nil
})
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
t.Fatal(err.Error())
}
@@ -724,15 +583,12 @@ func TestGitStatusParse(t *testing.T) {
if err != nil {
t.Fatal(err)
}
res := r.([]GitStatusData)
if len(res) != len(test.res) {
t.Fatal("len(r):", len(res), "is not expected", len(test.res))
if len(r) != len(test.res) {
t.Fatal("len(r):", len(r), "is not expected", len(test.res))
}
for _, expected := range test.res {
if !slices.Contains(res, expected) {
if !slices.Contains(r, expected) {
t.Fatal("result", r, "doesn't contains expected", expected)
}
}


@@ -160,10 +160,6 @@ type GiteaCommitStatusGetter interface {
GetCommitStatus(org, repo, hash string) ([]*models.CommitStatus, error)
}
type GiteaMerger interface {
ManualMergePR(org, repo string, id int64, commitid string, delBranch bool) error
}
type Gitea interface {
GiteaComment
GiteaRepoFetcher
@@ -172,7 +168,6 @@ type Gitea interface {
GiteaReviewer
GiteaPRFetcher
GiteaPRUpdater
GiteaMerger
GiteaCommitFetcher
GiteaReviewFetcher
GiteaCommentFetcher
@@ -182,6 +177,7 @@ type Gitea interface {
GiteaCommitStatusGetter
GiteaCommitStatusSetter
GiteaSetRepoOptions
GiteaTimelineFetcher
GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error)
GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error)
@@ -237,11 +233,6 @@ func (gitea *GiteaTransport) GetPullRequest(org, project string, num int64) (*mo
gitea.transport.DefaultAuthentication,
)
if err != nil {
LogError(err)
return nil, err
}
return pr.Payload, err
}
@@ -255,36 +246,9 @@ func (gitea *GiteaTransport) UpdatePullRequest(org, repo string, num int64, opti
gitea.transport.DefaultAuthentication,
)
if err != nil {
LogError(err)
return nil, err
}
return pr.Payload, err
}
func (gitea *GiteaTransport) ManualMergePR(org, repo string, num int64, commitid string, delBranch bool) error {
manual_merge := "manually-merged"
_, err := gitea.client.Repository.RepoMergePullRequest(
repository.NewRepoMergePullRequestParams().
WithOwner(org).
WithRepo(repo).
WithIndex(num).
WithBody(&models.MergePullRequestForm{
Do: &manual_merge,
DeleteBranchAfterMerge: delBranch,
HeadCommitID: commitid,
}), gitea.transport.DefaultAuthentication,
)
if err != nil {
LogError(err)
return err
}
return nil
}
func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRequest, error) {
var page, limit int64
@@ -309,9 +273,6 @@ func (gitea *GiteaTransport) GetPullRequests(org, repo string) ([]*models.PullRe
return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", org, repo, err)
}
if len(req.Payload) == 0 {
break
}
prs = slices.Concat(prs, req.Payload)
if len(req.Payload) < int(limit) {
break
@@ -334,11 +295,11 @@ func (gitea *GiteaTransport) GetCommitStatus(org, repo, hash string) ([]*models.
if err != nil {
return res, err
}
if len(r.Payload) == 0 {
res = append(res, r.Payload...)
if len(r.Payload) < int(limit) {
break
}
res = append(res, r.Payload...)
page++
}
return res, nil
@@ -399,10 +360,10 @@ func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum in
return nil, err
}
if len(reviews.Payload) == 0 {
allReviews = slices.Concat(allReviews, reviews.Payload)
if len(reviews.Payload) < int(limit) {
break
}
allReviews = slices.Concat(allReviews, reviews.Payload)
page++
}
@@ -492,9 +453,6 @@ func (gitea *GiteaTransport) GetNotifications(Type string, since *time.Time) ([]
return nil, err
}
if len(list.Payload) == 0 {
break
}
ret = slices.Concat(ret, list.Payload)
if len(list.Payload) < int(bigLimit) {
break
@@ -652,13 +610,9 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
}
if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
repository.NewRepoGetPullRequestByBaseHeadParams().
WithOwner(repo.Owner.UserName).
WithRepo(repo.Name).
WithBase(targetId).
WithHead(srcId),
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(targetId).WithHead(srcId),
gitea.transport.DefaultAuthentication,
); err == nil && pr.Payload.State == "open" {
); err == nil {
return pr.Payload, nil
}
@@ -785,18 +739,12 @@ func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models
resCount = len(res.Payload)
LogDebug("page:", page, "len:", resCount)
if resCount == 0 {
break
}
page++
for _, d := range res.Payload {
if d != nil {
retData = append(retData, d)
}
}
retData = append(retData, res.Payload...)
}
LogDebug("total results:", len(retData))
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
return time.Time(b.Created).Compare(time.Time(a.Created))
})

common/listen.go (new file, 324 lines)

@@ -0,0 +1,324 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"runtime/debug"
"slices"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type ListenDefinitions struct {
RabbitURL *url.URL // amqps://user:password@host/queue
GitAuthor string
Handlers map[string]RequestProcessor
Orgs []string
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitMessage rabbitmq.Delivery
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
ch, err := connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer ch.Close()
if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if ch.IsClosed() {
ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.processTopicChanges(ch, q.Name)
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *ListenDefinitions) connectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.connectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.processRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection. %#v", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *ListenDefinitions) connectToRabbitMQ() chan RabbitMessage {
ch := make(chan RabbitMessage, 100)
go l.connectAndProcessRabbitMQ(ch)
return ch
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}
func (l *ListenDefinitions) generateTopics() []string {
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
scope := "suse"
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range l.Orgs {
for requestType := range l.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (l *ListenDefinitions) UpdateTopics() {
newTopics := l.generateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
LogDebug("# Handlers:", len(l.Handlers))
LogDebug("# Orgs:", len(l.Orgs))
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = l.generateTopics()
ch := l.connectToRabbitMQ()
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(l.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
continue
}
LogDebug("org:", org, "type:", reqType)
if handler, found := l.Handlers[reqType]; found {
/* h, err := CreateRequestHandler()
if err != nil {
log.Println("Cannot create request handler", err)
continue
}
*/
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
continue
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
}
}
}
}
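
For orientation, a hedged consumer sketch for the new listen.go API above. Only ListenDefinitions, RequestProcessor, Request, RequestType_PR and the Log* helpers come from that file; the handler type, queue path and organization name are invented, and the RabbitMQ credentials are resolved by the package itself:

package main

import (
	"net/url"

	"src.opensuse.org/autogits/common"
)

// prLogger is a toy handler that satisfies common.RequestProcessor.
type prLogger struct{}

func (p *prLogger) ProcessFunc(req *common.Request) error {
	common.LogInfo("pull_request event received, type:", req.Type)
	return nil
}

func main() {
	u, _ := url.Parse("amqps://rabbit.opensuse.org/example-queue") // assumed queue path
	listener := &common.ListenDefinitions{
		RabbitURL: u,
		GitAuthor: "Example Bot <bot@example.com>",
		Orgs:      []string{"example-org"},
		Handlers: map[string]common.RequestProcessor{
			common.RequestType_PR: &prLogger{},
		},
	}
	if err := listener.ProcessRabbitMQEvents(); err != nil {
		common.LogError(err)
	}
}

With the "opensuse" scope selected for rabbit.opensuse.org, generateTopics would subscribe this listener to opensuse.src.example-org.pull_request.#.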


@@ -50,13 +50,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
u, _ := url.Parse("amqps://rabbit.example.com")
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := &RabbitMQGiteaEventsProcessor{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
c: &RabbitConnection{
RabbitURL: u,
topicSubChanges: make(chan string, len(test.topicDelta)*10),
},
l := ListenDefinitions{
Orgs: test.orgs1,
Handlers: make(map[string]RequestProcessor),
topicSubChanges: make(chan string, len(test.topicDelta)*10),
RabbitURL: u,
}
slices.Sort(test.topicDelta)
@@ -66,11 +64,11 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
}
changes := []string{}
l.c.UpdateTopics(l)
l.UpdateTopics()
a:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
changes = []string{}
@@ -80,13 +78,13 @@ func TestListenDefinitionsTopicUpdate(t *testing.T) {
l.Orgs = test.orgs2
l.c.UpdateTopics(l)
l.UpdateTopics()
changes = []string{}
b:
for {
select {
case c := <-l.c.topicSubChanges:
case c := <-l.topicSubChanges:
changes = append(changes, c)
default:
slices.Sort(changes)


@@ -63,10 +63,6 @@ func SetLoggingLevel(ll LogLevel) {
logLevel = ll
}
func GetLoggingLevel() LogLevel {
return logLevel
}
func SetLoggingLevelFromString(ll string) error {
switch ll {
case "info":


@@ -13,10 +13,10 @@ import (
//go:generate mockgen -source=maintainership.go -destination=mock/maintainership.go -typed
type MaintainershipData interface {
ListProjectMaintainers(OptionalGroupExpansion []*ReviewGroup) []string
ListPackageMaintainers(Pkg string, OptionalGroupExpasion []*ReviewGroup) []string
ListProjectMaintainers() []string
ListPackageMaintainers(pkg string) []string
IsApproved(Pkg string, Reviews []*models.PullReview, Submitter string, ReviewGroups []*ReviewGroup) bool
IsApproved(pkg string, reviews []*models.PullReview, submitter string) bool
}
const ProjectKey = ""
@@ -70,7 +70,7 @@ func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit
return m, err
}
func (data *MaintainershipMap) ListProjectMaintainers(groups []*ReviewGroup) []string {
func (data *MaintainershipMap) ListProjectMaintainers() []string {
if data == nil {
return nil
}
@@ -80,11 +80,6 @@ func (data *MaintainershipMap) ListProjectMaintainers(groups []*ReviewGroup) []s
return nil
}
// expands groups
for _, g := range groups {
m = g.ExpandMaintainers(m)
}
return m
}
@@ -101,7 +96,7 @@ func parsePkgDirData(pkg string, data []byte) []string {
return pkgMaintainers
}
func (data *MaintainershipMap) ListPackageMaintainers(pkg string, groups []*ReviewGroup) []string {
func (data *MaintainershipMap) ListPackageMaintainers(pkg string) []string {
if data == nil {
return nil
}
@@ -116,7 +111,7 @@ func (data *MaintainershipMap) ListPackageMaintainers(pkg string, groups []*Revi
}
}
}
prjMaintainers := data.ListProjectMaintainers(nil)
prjMaintainers := data.ListProjectMaintainers()
prjMaintainer:
for _, prjm := range prjMaintainers {
@@ -128,20 +123,15 @@ prjMaintainer:
pkgMaintainers = append(pkgMaintainers, prjm)
}
// expands groups
for _, g := range groups {
pkgMaintainers = g.ExpandMaintainers(pkgMaintainers)
}
return pkgMaintainers
}
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview, submitter string, groups []*ReviewGroup) bool {
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview, submitter string) bool {
var reviewers []string
if pkg != ProjectKey {
reviewers = data.ListPackageMaintainers(pkg, groups)
reviewers = data.ListPackageMaintainers(pkg)
} else {
reviewers = data.ListProjectMaintainers(groups)
reviewers = data.ListProjectMaintainers()
}
if len(reviewers) == 0 {


@@ -28,8 +28,6 @@ func TestMaintainership(t *testing.T) {
maintainersFile []byte
maintainersFileErr error
groups []*common.ReviewGroup
maintainersDir map[string][]byte
}{
/* PACKAGE MAINTAINERS */
@@ -53,22 +51,6 @@ func TestMaintainership(t *testing.T) {
maintainers: []string{"user1", "user2", "user3"},
packageName: "pkg",
},
{
name: "Multiple package maintainers and groups",
maintainersFile: []byte(`{"pkg": ["user1", "user2", "g2"], "": ["g2", "user1", "user3"]}`),
maintainersDir: map[string][]byte{
"_project": []byte(`{"": ["user1", "user3", "g2"]}`),
"pkg": []byte(`{"pkg": ["user1", "g2", "user2"]}`),
},
maintainers: []string{"user1", "user2", "user3", "user5"},
packageName: "pkg",
groups: []*common.ReviewGroup{
{
Name: "g2",
Reviewers: []string{"user1", "user5"},
},
},
},
{
name: "No package maintainers and only project maintainer",
maintainersFile: []byte(`{"pkg2": ["user1", "user2"], "": ["user1", "user3"]}`),
@@ -156,9 +138,9 @@ func TestMaintainership(t *testing.T) {
var m []string
if len(test.packageName) > 0 {
m = maintainers.ListPackageMaintainers(test.packageName, test.groups)
m = maintainers.ListPackageMaintainers(test.packageName)
} else {
m = maintainers.ListProjectMaintainers(test.groups)
m = maintainers.ListProjectMaintainers()
}
if len(m) != len(test.maintainers) {
@@ -225,7 +207,7 @@ func TestMaintainershipFileWrite(t *testing.T) {
{
name: "2 project maintainers and 2 single package maintainers",
maintainers: map[string][]string{
"": {"two", "one"},
"": {"two", "one"},
"pkg1": {},
"foo": {"four", "byte"},
},


@@ -1,56 +0,0 @@
package common
import (
"os"
"path"
"strings"
"gopkg.in/yaml.v3"
)
type Manifest struct {
Subdirectories []string
}
func (m *Manifest) SubdirForPackage(pkg string) string {
if m == nil {
return pkg
}
idx := -1
matchLen := 0
basePkg := path.Base(pkg)
lowercasePkg := strings.ToLower(basePkg)
for i, sub := range m.Subdirectories {
basename := strings.ToLower(path.Base(sub))
if strings.HasPrefix(lowercasePkg, basename) && matchLen < len(basename) {
idx = i
matchLen = len(basename)
}
}
if idx > -1 {
return path.Join(m.Subdirectories[idx], basePkg)
}
return pkg
}
func ReadManifestFile(filename string) (*Manifest, error) {
data, err := os.ReadFile(filename)
if err != nil {
return nil, err
}
return ParseManifestFile(data)
}
func ParseManifestFile(data []byte) (*Manifest, error) {
ret := &Manifest{}
err := yaml.Unmarshal(data, ret)
if err != nil {
return nil, err
}
return ret, nil
}


@@ -1,56 +0,0 @@
package common_test
import (
"testing"
"src.opensuse.org/autogits/common"
)
func TestManifestSubdirAssignments(t *testing.T) {
tests := []struct {
Name string
ManifestContent string
Packages []string
ManifestLocations []string
}{
{
Name: "empty manifest",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
ManifestLocations: []string{"atom", "blarg", "Foobar", "X-Ray", "boost", "NodeJS"},
},
{
Name: "only few subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "b/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "NodeJS"},
},
{
Name: "multilayer subdirs manifest with trailing /",
ManifestContent: "subdirectories:\n - a\n - b\n - libs/boo/\n - somedir/Node/",
Packages: []string{"atom", "blarg", "Foobar", "X-Ray", "Boost", "NodeJS", "foobar/node2"},
ManifestLocations: []string{"a/atom", "b/blarg", "Foobar", "X-Ray", "libs/boo/Boost", "somedir/Node/NodeJS", "somedir/Node/node2"},
},
}
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
m, err := common.ParseManifestFile([]byte(test.ManifestContent))
if err != nil {
t.Fatal(err)
}
for i, pkg := range test.Packages {
expected := test.ManifestLocations[i]
if l := m.SubdirForPackage(pkg); l != expected {
t.Error("Expected:", expected, "but got:", l)
}
}
})
}
}


@@ -1,120 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: config.go
//
// Generated by this command:
//
// mockgen -source=config.go -destination=mock/config.go -typed
//
// Package mock_common is a generated GoMock package.
package mock_common
import (
reflect "reflect"
gomock "go.uber.org/mock/gomock"
models "src.opensuse.org/autogits/common/gitea-generated/models"
)
// MockGiteaFileContentAndRepoFetcher is a mock of GiteaFileContentAndRepoFetcher interface.
type MockGiteaFileContentAndRepoFetcher struct {
ctrl *gomock.Controller
recorder *MockGiteaFileContentAndRepoFetcherMockRecorder
isgomock struct{}
}
// MockGiteaFileContentAndRepoFetcherMockRecorder is the mock recorder for MockGiteaFileContentAndRepoFetcher.
type MockGiteaFileContentAndRepoFetcherMockRecorder struct {
mock *MockGiteaFileContentAndRepoFetcher
}
// NewMockGiteaFileContentAndRepoFetcher creates a new mock instance.
func NewMockGiteaFileContentAndRepoFetcher(ctrl *gomock.Controller) *MockGiteaFileContentAndRepoFetcher {
mock := &MockGiteaFileContentAndRepoFetcher{ctrl: ctrl}
mock.recorder = &MockGiteaFileContentAndRepoFetcherMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockGiteaFileContentAndRepoFetcher) EXPECT() *MockGiteaFileContentAndRepoFetcherMockRecorder {
return m.recorder
}
// GetRepository mocks base method.
func (m *MockGiteaFileContentAndRepoFetcher) GetRepository(org, repo string) (*models.Repository, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetRepository", org, repo)
ret0, _ := ret[0].(*models.Repository)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetRepository indicates an expected call of GetRepository.
func (mr *MockGiteaFileContentAndRepoFetcherMockRecorder) GetRepository(org, repo any) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRepository", reflect.TypeOf((*MockGiteaFileContentAndRepoFetcher)(nil).GetRepository), org, repo)
return &MockGiteaFileContentAndRepoFetcherGetRepositoryCall{Call: call}
}
// MockGiteaFileContentAndRepoFetcherGetRepositoryCall wrap *gomock.Call
type MockGiteaFileContentAndRepoFetcherGetRepositoryCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) Return(arg0 *models.Repository, arg1 error) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.Return(arg0, arg1)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) Do(f func(string, string) (*models.Repository, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryCall) DoAndReturn(f func(string, string) (*models.Repository, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
// GetRepositoryFileContent mocks base method.
func (m *MockGiteaFileContentAndRepoFetcher) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetRepositoryFileContent", org, repo, hash, path)
ret0, _ := ret[0].([]byte)
ret1, _ := ret[1].(string)
ret2, _ := ret[2].(error)
return ret0, ret1, ret2
}
// GetRepositoryFileContent indicates an expected call of GetRepositoryFileContent.
func (mr *MockGiteaFileContentAndRepoFetcherMockRecorder) GetRepositoryFileContent(org, repo, hash, path any) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRepositoryFileContent", reflect.TypeOf((*MockGiteaFileContentAndRepoFetcher)(nil).GetRepositoryFileContent), org, repo, hash, path)
return &MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall{Call: call}
}
// MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall wrap *gomock.Call
type MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) Return(arg0 []byte, arg1 string, arg2 error) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.Return(arg0, arg1, arg2)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) Do(f func(string, string, string, string) ([]byte, string, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall) DoAndReturn(f func(string, string, string, string) ([]byte, string, error)) *MockGiteaFileContentAndRepoFetcherGetRepositoryFileContentCall {
c.Call = c.Call.DoAndReturn(f)
return c
}

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,156 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: maintainership.go
//
// Generated by this command:
//
// mockgen -source=maintainership.go -destination=mock/maintainership.go -typed
//
// Package mock_common is a generated GoMock package.
package mock_common
import (
reflect "reflect"
gomock "go.uber.org/mock/gomock"
common "src.opensuse.org/autogits/common"
models "src.opensuse.org/autogits/common/gitea-generated/models"
)
// MockMaintainershipData is a mock of MaintainershipData interface.
type MockMaintainershipData struct {
ctrl *gomock.Controller
recorder *MockMaintainershipDataMockRecorder
isgomock struct{}
}
// MockMaintainershipDataMockRecorder is the mock recorder for MockMaintainershipData.
type MockMaintainershipDataMockRecorder struct {
mock *MockMaintainershipData
}
// NewMockMaintainershipData creates a new mock instance.
func NewMockMaintainershipData(ctrl *gomock.Controller) *MockMaintainershipData {
mock := &MockMaintainershipData{ctrl: ctrl}
mock.recorder = &MockMaintainershipDataMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockMaintainershipData) EXPECT() *MockMaintainershipDataMockRecorder {
return m.recorder
}
// IsApproved mocks base method.
func (m *MockMaintainershipData) IsApproved(Pkg string, Reviews []*models.PullReview, Submitter string, ReviewGroups []*common.ReviewGroup) bool {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "IsApproved", Pkg, Reviews, Submitter, ReviewGroups)
ret0, _ := ret[0].(bool)
return ret0
}
// IsApproved indicates an expected call of IsApproved.
func (mr *MockMaintainershipDataMockRecorder) IsApproved(Pkg, Reviews, Submitter, ReviewGroups any) *MockMaintainershipDataIsApprovedCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsApproved", reflect.TypeOf((*MockMaintainershipData)(nil).IsApproved), Pkg, Reviews, Submitter, ReviewGroups)
return &MockMaintainershipDataIsApprovedCall{Call: call}
}
// MockMaintainershipDataIsApprovedCall wrap *gomock.Call
type MockMaintainershipDataIsApprovedCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataIsApprovedCall) Return(arg0 bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.Return(arg0)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataIsApprovedCall) Do(f func(string, []*models.PullReview, string, []*common.ReviewGroup) bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataIsApprovedCall) DoAndReturn(f func(string, []*models.PullReview, string, []*common.ReviewGroup) bool) *MockMaintainershipDataIsApprovedCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
// ListPackageMaintainers mocks base method.
func (m *MockMaintainershipData) ListPackageMaintainers(Pkg string, OptionalGroupExpasion []*common.ReviewGroup) []string {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListPackageMaintainers", Pkg, OptionalGroupExpasion)
ret0, _ := ret[0].([]string)
return ret0
}
// ListPackageMaintainers indicates an expected call of ListPackageMaintainers.
func (mr *MockMaintainershipDataMockRecorder) ListPackageMaintainers(Pkg, OptionalGroupExpasion any) *MockMaintainershipDataListPackageMaintainersCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListPackageMaintainers", reflect.TypeOf((*MockMaintainershipData)(nil).ListPackageMaintainers), Pkg, OptionalGroupExpasion)
return &MockMaintainershipDataListPackageMaintainersCall{Call: call}
}
// MockMaintainershipDataListPackageMaintainersCall wrap *gomock.Call
type MockMaintainershipDataListPackageMaintainersCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataListPackageMaintainersCall) Return(arg0 []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.Return(arg0)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataListPackageMaintainersCall) Do(f func(string, []*common.ReviewGroup) []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataListPackageMaintainersCall) DoAndReturn(f func(string, []*common.ReviewGroup) []string) *MockMaintainershipDataListPackageMaintainersCall {
c.Call = c.Call.DoAndReturn(f)
return c
}
// ListProjectMaintainers mocks base method.
func (m *MockMaintainershipData) ListProjectMaintainers(OptionalGroupExpansion []*common.ReviewGroup) []string {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListProjectMaintainers", OptionalGroupExpansion)
ret0, _ := ret[0].([]string)
return ret0
}
// ListProjectMaintainers indicates an expected call of ListProjectMaintainers.
func (mr *MockMaintainershipDataMockRecorder) ListProjectMaintainers(OptionalGroupExpansion any) *MockMaintainershipDataListProjectMaintainersCall {
mr.mock.ctrl.T.Helper()
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListProjectMaintainers", reflect.TypeOf((*MockMaintainershipData)(nil).ListProjectMaintainers), OptionalGroupExpansion)
return &MockMaintainershipDataListProjectMaintainersCall{Call: call}
}
// MockMaintainershipDataListProjectMaintainersCall wrap *gomock.Call
type MockMaintainershipDataListProjectMaintainersCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockMaintainershipDataListProjectMaintainersCall) Return(arg0 []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.Return(arg0)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockMaintainershipDataListProjectMaintainersCall) Do(f func([]*common.ReviewGroup) []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockMaintainershipDataListProjectMaintainersCall) DoAndReturn(f func([]*common.ReviewGroup) []string) *MockMaintainershipDataListProjectMaintainersCall {
c.Call = c.Call.DoAndReturn(f)
return c
}


@@ -1,85 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: obs_utils.go
//
// Generated by this command:
//
// mockgen -source=obs_utils.go -destination=mock/obs_utils.go -typed
//
// Package mock_common is a generated GoMock package.
package mock_common
import (
reflect "reflect"
gomock "go.uber.org/mock/gomock"
common "src.opensuse.org/autogits/common"
)
// MockObsStatusFetcherWithState is a mock of ObsStatusFetcherWithState interface.
type MockObsStatusFetcherWithState struct {
ctrl *gomock.Controller
recorder *MockObsStatusFetcherWithStateMockRecorder
isgomock struct{}
}
// MockObsStatusFetcherWithStateMockRecorder is the mock recorder for MockObsStatusFetcherWithState.
type MockObsStatusFetcherWithStateMockRecorder struct {
mock *MockObsStatusFetcherWithState
}
// NewMockObsStatusFetcherWithState creates a new mock instance.
func NewMockObsStatusFetcherWithState(ctrl *gomock.Controller) *MockObsStatusFetcherWithState {
mock := &MockObsStatusFetcherWithState{ctrl: ctrl}
mock.recorder = &MockObsStatusFetcherWithStateMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockObsStatusFetcherWithState) EXPECT() *MockObsStatusFetcherWithStateMockRecorder {
return m.recorder
}
// BuildStatusWithState mocks base method.
func (m *MockObsStatusFetcherWithState) BuildStatusWithState(project string, opts *common.BuildResultOptions, packages ...string) (*common.BuildResultList, error) {
m.ctrl.T.Helper()
varargs := []any{project, opts}
for _, a := range packages {
varargs = append(varargs, a)
}
ret := m.ctrl.Call(m, "BuildStatusWithState", varargs...)
ret0, _ := ret[0].(*common.BuildResultList)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// BuildStatusWithState indicates an expected call of BuildStatusWithState.
func (mr *MockObsStatusFetcherWithStateMockRecorder) BuildStatusWithState(project, opts any, packages ...any) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
mr.mock.ctrl.T.Helper()
varargs := append([]any{project, opts}, packages...)
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildStatusWithState", reflect.TypeOf((*MockObsStatusFetcherWithState)(nil).BuildStatusWithState), varargs...)
return &MockObsStatusFetcherWithStateBuildStatusWithStateCall{Call: call}
}
// MockObsStatusFetcherWithStateBuildStatusWithStateCall wrap *gomock.Call
type MockObsStatusFetcherWithStateBuildStatusWithStateCall struct {
*gomock.Call
}
// Return rewrite *gomock.Call.Return
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) Return(arg0 *common.BuildResultList, arg1 error) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.Return(arg0, arg1)
return c
}
// Do rewrite *gomock.Call.Do
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) Do(f func(string, *common.BuildResultOptions, ...string) (*common.BuildResultList, error)) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.Do(f)
return c
}
// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) DoAndReturn(f func(string, *common.BuildResultOptions, ...string) (*common.BuildResultList, error)) *MockObsStatusFetcherWithStateBuildStatusWithStateCall {
c.Call = c.Call.DoAndReturn(f)
return c
}


@@ -127,12 +127,10 @@ type ProjectMeta struct {
Groups []GroupRepoMeta `xml:"group"`
Repositories []RepositoryMeta `xml:"repository"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
Access Flags `xml:"access"`
SourceAccess Flags `xml:"sourceaccess"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
}
type PackageMeta struct {
@@ -142,12 +140,6 @@ type PackageMeta struct {
ScmSync string `xml:"scmsync"`
Persons []PersonRepoMeta `xml:"person"`
Groups []GroupRepoMeta `xml:"group"`
BuildFlags Flags `xml:"build"`
PublicFlags Flags `xml:"publish"`
DebugFlags Flags `xml:"debuginfo"`
UseForBuild Flags `xml:"useforbuild"`
SourceAccess Flags `xml:"sourceaccess"`
}
type UserMeta struct {
@@ -570,58 +562,25 @@ func (c *ObsClient) DeleteProject(project string) error {
}
return nil
}
func (c *ObsClient) BuildLog(prj, pkg, repo, arch string) (io.ReadCloser, error) {
url := c.baseUrl.JoinPath("build", prj, repo, arch, pkg, "_log")
query := url.Query()
query.Add("nostream", "1")
query.Add("start", "0")
url.RawQuery = query.Encode()
res, err := c.ObsRequestRaw("GET", url.String(), nil)
if err != nil {
return nil, err
}
return res.Body, nil
}
type PackageBuildStatus struct {
Package string `xml:"package,attr"`
Code string `xml:"code,attr"`
Details string `xml:"details"`
LastUpdate time.Time
}
func PackageBuildStatusComp(A, B *PackageBuildStatus) int {
return strings.Compare(A.Package, B.Package)
}
type BuildResult struct {
XMLName xml.Name `xml:"result" json:"xml,omitempty"`
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr,omitempty"`
ScmSync string `xml:"scmsync,omitempty"`
ScmInfo string `xml:"scminfo,omitempty"`
Status []*PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist,omitempty"`
LastUpdate time.Time
}
func BuildResultComp(A, B *BuildResult) int {
if cmp := strings.Compare(A.Project, B.Project); cmp != 0 {
return cmp
}
if cmp := strings.Compare(A.Repository, B.Repository); cmp != 0 {
return cmp
}
return strings.Compare(A.Arch, B.Arch)
Project string `xml:"project,attr"`
Repository string `xml:"repository,attr"`
Arch string `xml:"arch,attr"`
Code string `xml:"code,attr"`
Dirty bool `xml:"dirty,attr"`
ScmSync string `xml:"scmsync"`
ScmInfo string `xml:"scminfo"`
Status []PackageBuildStatus `xml:"status"`
Binaries []BinaryList `xml:"binarylist"`
}
type Binary struct {
@@ -636,9 +595,9 @@ type BinaryList struct {
}
type BuildResultList struct {
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
Result []*BuildResult `xml:"result"`
XMLName xml.Name `xml:"resultlist"`
State string `xml:"state,attr"`
Result []BuildResult `xml:"result"`
isLastBuild bool
}
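
The result structures above are plain encoding/xml targets. A hedged sketch of decoding an OBS build-result payload into them (the helper name is invented; either side of the diff, pointer or value Result slice, unmarshals the same way thanks to the xml tags):

package common

import "encoding/xml"

// decodeBuildResults parses a build-result XML payload, e.g. the body
// returned for a project's _result query.
func decodeBuildResults(payload []byte) (*BuildResultList, error) {
	var list BuildResultList
	if err := xml.Unmarshal(payload, &list); err != nil {
		return nil, err
	}
	return &list, nil
}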


@@ -4,10 +4,11 @@ import (
"bufio"
"errors"
"fmt"
"os"
"path"
"slices"
"strings"
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
"src.opensuse.org/autogits/common/gitea-generated/models"
)
@@ -24,13 +25,6 @@ type PRSet struct {
BotUser string
}
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
org = prinfo.PR.Base.Repo.Owner.UserName
repo = prinfo.PR.Base.Repo.Name
idx = prinfo.PR.Index
return
}
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
for _, p := range currentSet {
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
@@ -61,15 +55,14 @@ func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRIn
var Timeline_RefIssueNotFound error = errors.New("RefIssue not found on the timeline")
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
func LastPrjGitRefOnTimeline(gitea GiteaPRTimelineFetcher, org, repo string, num int64, prjGitOrg, prjGitRepo string) (*models.PullRequest, error) {
prRefLine := fmt.Sprintf(PrPattern, org, repo, num)
timeline, err := gitea.GetTimeline(org, repo, num)
if err != nil {
LogError("Failed to fetch timeline for", org, repo, "#", num, err)
return nil, err
}
prjGitOrg, prjGitRepo, prjGitBranch := config.GetPrjGit()
for idx := len(timeline) - 1; idx >= 0; idx-- {
item := timeline[idx]
issue := item.RefIssue
@@ -79,27 +72,9 @@ func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineFetcher, org,
issue.Repository.Owner == prjGitOrg &&
issue.Repository.Name == prjGitRepo {
if !config.NoProjectGitPR {
if issue.User.UserName != botUser {
continue
}
}
pr, err := gitea.GetPullRequest(prjGitOrg, prjGitRepo, issue.Index)
switch err.(type) {
case *repository.RepoGetPullRequestNotFound: // deleted?
continue
default:
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
}
if pr.Base.Ref != prjGitBranch {
continue
}
_, prs := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(item.RefIssue.Body)))
for _, pr := range prs {
if pr.Org == org && pr.Repo == repo && pr.Num == num {
lines := SplitLines(item.RefIssue.Body)
for _, line := range lines {
if strings.TrimSpace(line) == prRefLine {
LogDebug("Found PrjGit PR in Timeline:", issue.Index)
// found prjgit PR in timeline. Return it
@@ -123,7 +98,7 @@ func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num
return nil, err
}
} else {
if pr, err = LastPrjGitRefOnTimeline(user, gitea, org, repo, num, config); err != nil && err != Timeline_RefIssueNotFound {
if pr, err = LastPrjGitRefOnTimeline(gitea, org, repo, num, prjGitOrg, prjGitRepo); err != nil && err != Timeline_RefIssueNotFound {
return nil, err
}
@@ -250,11 +225,11 @@ func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintaine
reviewers = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
if len(rs.PRs) == 1 {
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers(nil))
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers())
}
} else {
pkg := pr.PR.Base.Repo.Name
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(nil), maintainers.ListPackageMaintainers(pkg, nil), configReviewers.PkgOptional)
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(), maintainers.ListPackageMaintainers(pkg), configReviewers.PkgOptional)
}
slices.Sort(reviewers)
@@ -291,7 +266,7 @@ func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintaine
if !IsDryRun {
for _, r := range reviewers {
if _, err := gitea.RequestReviews(pr.PR, r); err != nil {
LogError("Cannot create reviews on", fmt.Sprintf("%s/%s!%d for [%s]", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", ")), err)
LogError("Cannot create reviews on", fmt.Sprintf("%s/%s#%d for [%s]", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", ")), err)
}
}
}
@@ -300,22 +275,15 @@ func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintaine
return nil
}
func (rs *PRSet) RemoveClosedPRs() {
rs.PRs = slices.DeleteFunc(rs.PRs, func(pr *PRInfo) bool {
return pr.PR.State != "open"
})
}
func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData) bool {
configReviewers := ParseReviewers(rs.Config.Reviewers)
is_manually_reviewed_ok := false
if need_manual_review := rs.Config.ManualMergeOnly || rs.Config.ManualMergeProject; need_manual_review {
// Groups are expanded here because any group member can issue "merge ok" to the BotUser
groups := rs.Config.ReviewGroups
prjgit, err := rs.GetPrjGitPR()
if err == nil && prjgit != nil {
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers(groups))
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers())
LogDebug("Fetching reviews for", prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
r, err := FetchGiteaReviews(gitea, reviewers, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
if err != nil {
@@ -335,7 +303,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
}
pkg := pr.PR.Base.Repo.Name
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg, groups))
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg))
LogDebug("Fetching reviews for", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
if err != nil {
@@ -376,22 +344,18 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
if err != nil {
LogError("Cannot fetch gitea reaviews for PR:", err)
LogError("Cannot fetch gita reaviews for PR:", err)
return false
}
is_manually_reviewed_ok = r.IsApproved()
LogDebug("PR to", pr.PR.Base.Repo.Name, "reviewed?", is_manually_reviewed_ok)
LogDebug(pr.PR.Base.Repo.Name, is_manually_reviewed_ok)
if !is_manually_reviewed_ok {
if GetLoggingLevel() > LogLevelInfo {
LogDebug("missing reviewers:", r.MissingReviews())
}
return false
}
if need_maintainer_review := !rs.IsPrjGitPR(pr.PR) || pr.PR.User.UserName != rs.BotUser; need_maintainer_review {
// Do not expand groups here, as the group-review-bot will ACK if group has reviewed.
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName); !is_manually_reviewed_ok {
LogDebug(" not approved?", pkg)
return false
}
@@ -409,8 +373,7 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
}
prjgit := prjgit_info.PR
_, _, prjgitBranch := rs.Config.GetPrjGit()
remote, err := git.GitClone(DefaultGitPrj, prjgitBranch, prjgit.Base.Repo.SSHURL)
remote, err := git.GitClone(DefaultGitPrj, rs.Config.Branch, prjgit.Base.Repo.SSHURL)
PanicOnError(err)
git.GitExecOrPanic(DefaultGitPrj, "fetch", remote, prjgit.Head.Sha)
@@ -427,12 +390,83 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
panic("FIXME")
}
*/
msg := fmt.Sprintf("Merging\n\nPR: %s/%s!%d", prjgit.Base.Repo.Owner.UserName, prjgit.Base.Repo.Name, prjgit.Index)
msg := fmt.Sprintf("Merging\n\nPR: %s/%s#%d", prjgit.Base.Repo.Owner.UserName, prjgit.Base.Repo.Name, prjgit.Index)
err = git.GitExec(DefaultGitPrj, "merge", "--no-ff", "-m", msg, prjgit.Head.Sha)
if err != nil {
if resolveError := git.GitResolveConflicts(DefaultGitPrj, prjgit.MergeBase, prjgit.Base.Sha, prjgit.Head.Sha); resolveError != nil {
return fmt.Errorf("Merge failed. (%w): %w", err, resolveError)
status, statusErr := git.GitStatus(DefaultGitPrj)
if statusErr != nil {
return fmt.Errorf("Failed to merge: %w . Status also failed: %w", err, statusErr)
}
// we can only resolve conflicts with .gitmodules
for _, s := range status {
if s.Status == GitStatus_Unmerged {
if s.Path != ".gitmodules" {
return err
}
submodules, err := git.GitSubmoduleList(DefaultGitPrj, "MERGE_HEAD")
if err != nil {
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
}
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
s3, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[2])
if err != nil {
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
}
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
if err != nil {
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
}
// merge from subs3 (target), subs1 (orig), subs2 (2-nd base that is missing from target base)
// this will update submodules
mergedSubs := slices.Concat(subs1, subs2, subs3)
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
nextSub:
for subName := range submodules {
for i := range mergedSubs {
if path.Base(mergedSubs[i].Path) == subName {
filteredSubs = append(filteredSubs, mergedSubs[i])
continue nextSub
}
}
return fmt.Errorf("Cannot find submodule for path: %s", subName)
}
out, err := os.Create(path.Join(git.GetPath(), DefaultGitPrj, ".gitmodules"))
if err != nil {
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
}
if err = WriteSubmodules(filteredSubs, out); err != nil {
return fmt.Errorf("Can't write .gitmodules: %w", err)
}
if err = out.Close(); err != nil {
return fmt.Errorf("Can't close .gitmodules: %w", err)
}
git.GitExecOrPanic(DefaultGitPrj, "add", ".gitmodules")
git.GitExecOrPanic(DefaultGitPrj, "-c", "core.editor=true", "merge", "--continue")
}
}
}
@@ -458,15 +492,7 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
if rs.IsPrjGitPR(prinfo.PR) {
continue
}
br := rs.Config.Branch
if len(br) == 0 {
// if branch is unspecified, take it from the PR as it
// matches default branch already
br = prinfo.PR.Base.Name
} else if br != prinfo.PR.Base.Name {
panic(prinfo.PR.Base.Name + " is expected to match " + br)
}
prinfo.RemoteName, err = git.GitClone(repo.Name, br, repo.SSHURL)
prinfo.RemoteName, err = git.GitClone(repo.Name, rs.Config.Branch, repo.SSHURL)
PanicOnError(err)
git.GitExecOrPanic(repo.Name, "fetch", prinfo.RemoteName, head.Sha)
git.GitExecOrPanic(repo.Name, "merge", "--ff", head.Sha)


@@ -0,0 +1,48 @@
package common
import (
"errors"
"strings"
)
var UnknownParser error = errors.New("Cannot parse path")
type PRConflictResolver interface {
/*
stage_content -> { merge_base (stage1), head (stage2), merge_head (stage3) }
*/
Resolve(path string, stage_contents [3]string) error
}
var resolvers []PRConflictResolver = []PRConflictResolver{
&submodule_conflict_resolver{},
}
func ResolveMergeConflict(path string, file_contents [3]string) error {
for _, r := range resolvers {
if err := r.Resolve(path, file_contents); err != UnknownParser {
return err
}
}
return UnknownParser
}
type submodule_conflict_resolver struct{}
func (*submodule_conflict_resolver) Resolve(path string, stage [3]string) error {
if path != ".gitmodules" {
return UnknownParser
}
return UnknownParser
}
type changes_file_resolver struct{}
func (*changes_file_resolver) Resolve(path string, stage [3]string) error {
if !strings.HasSuffix(path, ".changes") {
return UnknownParser
}
return UnknownParser
}
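
A hedged example of plugging into this resolver chain (the type and the *.txt rule are purely illustrative; a real resolver would also be appended to the package-level resolvers slice so ResolveMergeConflict can reach it, and would write the merged content back to the work tree):

package common

import "strings"

// keepHeadTxtResolver claims *.txt conflicts and treats the head content
// (stage index 1, per the interface comment above) as the resolution.
type keepHeadTxtResolver struct{}

func (*keepHeadTxtResolver) Resolve(path string, stage [3]string) error {
	if !strings.HasSuffix(path, ".txt") {
		return UnknownParser // not ours; let the next resolver try
	}
	_ = stage[1] // the head content would simply be kept
	return nil   // nil signals the conflict is handled
}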


@@ -0,0 +1,10 @@
package common_test
import "testing"
func ResolveSubmoduleConflicts(t *testing.T) {
}
func ResolveChangesFileConflict(t *testing.T) {
}


@@ -15,23 +15,22 @@ import (
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
/*
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}
t.Error(r)
func TestCockpit(t *testing.T) {
common.SetLoggingLevel(common.LogLevelDebug)
gitea := common.AllocateGiteaTransport("https://src.opensuse.org")
tl, err := gitea.GetTimeline("cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Fail to timeline", err)
}
t.Log(tl)
r, err := common.FetchGiteaReviews(gitea, []string{}, "cockpit", "cockpit", 29)
if err != nil {
t.Fatal("Error:", err)
}
t.Error(r)
}
*/
func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
timeline := make([]*models.TimelineComment, len(reviews))
@@ -49,13 +48,11 @@ func reviewsToTimeline(reviews []*models.PullReview) []*models.TimelineComment {
}
func TestPR(t *testing.T) {
return
baseConfig := common.AutogitConfig{
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
Branch: "branch",
Organization: "foo",
GitProjectName: "foo/barPrj#master",
GitProjectName: "barPrj",
}
type prdata struct {
@@ -609,9 +606,9 @@ func TestPR(t *testing.T) {
*/
maintainers := mock_common.NewMockMaintainershipData(ctl)
maintainers.EXPECT().ListPackageMaintainers(gomock.Any(), gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(true).AnyTimes()
maintainers.EXPECT().ListPackageMaintainers(gomock.Any()).Return([]string{}).AnyTimes()
maintainers.EXPECT().ListProjectMaintainers().Return([]string{}).AnyTimes()
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any(), gomock.Any()).Return(true).AnyTimes()
if isApproved := res.IsApproved(review_mock, maintainers); isApproved != test.reviewed {
t.Error("expected reviewed to be NOT", isApproved)
@@ -621,7 +618,7 @@ func TestPR(t *testing.T) {
}
func TestPRAssignReviewers(t *testing.T) {
t.Skip("FAIL: unexpected calls, missing calls")
tests := []struct {
name string
config common.AutogitConfig
@@ -641,7 +638,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "No reviewers",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{},
@@ -651,7 +648,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "One project reviewer only",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1"},
@@ -661,7 +658,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "One project reviewer and one pkg reviewer only",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1", "user2"},
@@ -671,7 +668,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "No need to get reviews of submitter",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1", "submitter"},
@@ -681,7 +678,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "Reviews are done",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1", "user2"},
@@ -715,7 +712,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "Stale review is not done, re-request it",
config: common.AutogitConfig{
GitProjectName: "org/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1", "user2"},
@@ -747,7 +744,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "Stale optional review is not done, re-request it",
config: common.AutogitConfig{
GitProjectName: "prg/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{"-user1", "user2", "~bot"},
@@ -818,8 +815,8 @@ func TestPRAssignReviewers(t *testing.T) {
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
review_mock.EXPECT().GetTimeline("org", "repo", int64(42)).Return(test.prjTimeline, nil)
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo", gomock.Any()).Return([]string{"pkgmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo").Return([]string{"pkgmaintainer"}).AnyTimes()
prs, _ := common.FetchPRSet("test", pr_mock, "other", "pkgrepo", int64(1), &test.config)
if len(prs.PRs) != 2 {
@@ -855,7 +852,7 @@ func TestPRAssignReviewers(t *testing.T) {
{
name: "PrjMaintainers in prjgit review when not part of pkg set",
config: common.AutogitConfig{
GitProjectName: "org/repo#main",
GitProjectName: "repo",
Organization: "org",
Branch: "main",
Reviewers: []string{},
@@ -881,7 +878,7 @@ func TestPRAssignReviewers(t *testing.T) {
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
review_mock.EXPECT().GetTimeline("org", "repo", int64(1)).Return(nil, nil)
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
prs, _ := common.FetchPRSet("test", pr_mock, "org", "repo", int64(1), &test.config)
if len(prs.PRs) != 1 {
@@ -902,7 +899,6 @@ func TestPRAssignReviewers(t *testing.T) {
}
func TestPRMerge(t *testing.T) {
t.Skip("FAIL: No PrjGit PR found, missing calls")
repoDir := t.TempDir()
cwd, _ := os.Getwd()
@@ -927,7 +923,7 @@ func TestPRMerge(t *testing.T) {
config := &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/prj#master",
GitProjectName: "prj",
}
tests := []struct {
@@ -941,7 +937,6 @@ func TestPRMerge(t *testing.T) {
pr: &models.PullRequest{
Base: &models.PRBranchInfo{
Sha: "e8b0de43d757c96a9d2c7101f4bff404e322f53a1fa4041fb85d646110c38ad4", // "base_add_b1"
Name: "master",
Repo: &models.Repository{
Name: "prj",
Owner: &models.User{
@@ -962,7 +957,6 @@ func TestPRMerge(t *testing.T) {
pr: &models.PullRequest{
Base: &models.PRBranchInfo{
Sha: "4fbd1026b2d7462ebe9229a49100c11f1ad6555520a21ba515122d8bc41328a8",
Name: "master",
Repo: &models.Repository{
Name: "prj",
Owner: &models.User{
@@ -982,7 +976,6 @@ func TestPRMerge(t *testing.T) {
t.Run(test.name, func(t *testing.T) {
ctl := gomock.NewController(t)
mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
reviewUnrequestMock := mock_common.NewMockGiteaReviewUnrequester(ctl)
reviewUnrequestMock.EXPECT().UnrequestReview(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil)
@@ -1010,7 +1003,6 @@ func TestPRMerge(t *testing.T) {
}
func TestPRChanges(t *testing.T) {
t.Skip("FAIL: unexpected calls, missing calls")
tests := []struct {
name string
PRs []*models.PullRequest


@@ -1,238 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Foobar. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"crypto/tls"
"fmt"
"net/url"
"strings"
"time"
rabbitmq "github.com/rabbitmq/amqp091-go"
)
type RabbitConnection struct {
RabbitURL *url.URL // amqps://user:password@host/queue
queueName string
ch *rabbitmq.Channel
topics []string
topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}
type RabbitProcessor interface {
GenerateTopics() []string
Connection() *RabbitConnection
ProcessRabbitMessage(msg RabbitMessage) error
}
type RabbitMessage rabbitmq.Delivery
func (l *RabbitConnection) ProcessTopicChanges() {
for {
topic, ok := <-l.topicSubChanges
if !ok {
return
}
LogDebug(" topic change:", topic)
switch topic[0] {
case '+':
if err := l.ch.QueueBind(l.queueName, topic[1:], "pubsub", false, nil); err != nil {
LogError(err)
}
case '-':
if err := l.ch.QueueUnbind(l.queueName, topic[1:], "pubsub", nil); err != nil {
LogError(err)
}
default:
LogInfo("Ignoring unknown topic change:", topic)
}
}
}
func (l *RabbitConnection) ProcessRabbitMQ(msgCh chan<- RabbitMessage) error {
queueName := l.RabbitURL.Path
l.RabbitURL.Path = ""
if len(queueName) > 0 && queueName[0] == '/' {
queueName = queueName[1:]
}
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
ServerName: l.RabbitURL.Hostname(),
})
if err != nil {
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
}
defer connection.Close()
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Cannot create a channel. Err: %w", err)
}
defer l.ch.Close()
if err = l.ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
}
var q rabbitmq.Queue
if len(queueName) == 0 {
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
} else {
q, err = l.ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("queue not found .. trying to create it:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
if err != nil {
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
}
q, err = l.ch.QueueDeclare(queueName, true, false, true, false, nil)
if err != nil {
LogInfo("can't create persistent queue ... falling back to temporaty queue:", err)
if l.ch.IsClosed() {
l.ch, err = connection.Channel()
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
}
q, err = l.ch.QueueDeclare("", false, true, true, false, nil)
}
}
}
if err != nil {
return fmt.Errorf("Cannot declare queue. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
LogDebug(" -- listening to topics:")
l.topicSubChanges = make(chan string)
defer close(l.topicSubChanges)
go l.ProcessTopicChanges()
for _, topic := range l.topics {
l.topicSubChanges <- "+" + topic
}
msgs, err := l.ch.Consume(q.Name, "", true, true, false, false, nil)
if err != nil {
return fmt.Errorf("Cannot start consumer. Err: %w", err)
}
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
for {
msg, ok := <-msgs
if !ok {
return fmt.Errorf("channel/connection closed?\n")
}
msgCh <- RabbitMessage(msg)
}
}
func (l *RabbitConnection) ConnectAndProcessRabbitMQ(ch chan<- RabbitMessage) {
defer func() {
if r := recover(); r != nil {
LogError(r)
LogError("'crash' RabbitMQ worker. Recovering... reconnecting...")
time.Sleep(5 * time.Second)
go l.ConnectAndProcessRabbitMQ(ch)
}
}()
for {
err := l.ProcessRabbitMQ(ch)
if err != nil {
LogError("Error in RabbitMQ connection:", err)
LogInfo("Reconnecting in 2 seconds...")
time.Sleep(2 * time.Second)
}
}
}
func (l *RabbitConnection) ConnectToRabbitMQ(processor RabbitProcessor) <-chan RabbitMessage {
LogInfo("RabbitMQ connection:", l.RabbitURL.String())
l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
l.topics = processor.GenerateTopics()
ch := make(chan RabbitMessage, 100)
go l.ConnectAndProcessRabbitMQ(ch)
return ch
}
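// UpdateTopics walks the sorted lists of currently bound and newly generated
// topics in parallel and sends "+topic" / "-topic" changes to topicSubChanges,
// where ProcessTopicChanges binds or unbinds the queue accordingly. For
// example, going from {a.pull_request.#, a.push.#} to {a.push.#, b.push.#}
// emits "-a.pull_request.#" and "+b.push.#" and leaves the shared binding
// untouched.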
func (l *RabbitConnection) UpdateTopics(processor RabbitProcessor) {
newTopics := processor.GenerateTopics()
j := 0
next_new_topic:
for i := 0; i < len(newTopics); i++ {
topic := newTopics[i]
for j < len(l.topics) {
cmp := strings.Compare(topic, l.topics[j])
if cmp == 0 {
j++
continue next_new_topic
}
if cmp < 0 {
l.topicSubChanges <- "+" + topic
break
}
l.topicSubChanges <- "-" + l.topics[j]
j++
}
if j == len(l.topics) {
l.topicSubChanges <- "+" + topic
}
}
for j < len(l.topics) {
l.topicSubChanges <- "-" + l.topics[j]
j++
}
l.topics = newTopics
}
func ProcessRabbitMQEvents(processor RabbitProcessor) error {
ch := processor.Connection().ConnectToRabbitMQ(processor)
for {
msg, ok := <-ch
if !ok {
return nil
}
LogDebug("event:", msg.RoutingKey)
if err := processor.ProcessRabbitMessage(msg); err != nil {
LogError("Error processing", msg.RoutingKey, err)
}
}
}

View File

@@ -1,129 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"fmt"
"runtime/debug"
"slices"
"strings"
)
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Status = "status"
const RequestType_Wiki = "wiki"
type RequestProcessor interface {
ProcessFunc(*Request) error
}
type RabbitMQGiteaEventsProcessor struct {
Handlers map[string]RequestProcessor
Orgs []string
c *RabbitConnection
}
func (gitea *RabbitMQGiteaEventsProcessor) Connection() *RabbitConnection {
if gitea.c == nil {
gitea.c = &RabbitConnection{}
}
return gitea.c
}
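// GenerateTopics produces one binding key per (org, request type) pair in the
// form "<scope>.src.<org>.<type>.#", e.g. "opensuse.src.autogits.pull_request.#"
// when connected to rabbit.opensuse.org; the scope is "suse" otherwise.
// The result is sorted and de-duplicated.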
func (gitea *RabbitMQGiteaEventsProcessor) GenerateTopics() []string {
topics := make([]string, 0, len(gitea.Handlers)*len(gitea.Orgs))
scope := "suse"
if gitea.c.RabbitURL.Hostname() == "rabbit.opensuse.org" {
scope = "opensuse"
}
for _, org := range gitea.Orgs {
for requestType := range gitea.Handlers {
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
}
}
slices.Sort(topics)
return slices.Compact(topics)
}
func (gitea *RabbitMQGiteaEventsProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
route := strings.Split(msg.RoutingKey, ".")
if len(route) > 3 {
reqType := route[3]
org := route[2]
if !slices.Contains(gitea.Orgs, org) {
LogInfo("Got event for unhandeled org:", org)
return nil
}
LogDebug("org:", org, "type:", reqType)
if handler, found := gitea.Handlers[reqType]; found {
req, err := ParseRequestJSON(reqType, msg.Body)
if err != nil {
LogError("Error parsing request JSON:", err)
} else {
LogDebug("processing req", req.Type)
// h.Request = req
ProcessEvent(handler, req)
}
return nil
}
}
return fmt.Errorf("Invalid routing key: %s", route)
}
func ProcessEvent(f RequestProcessor, request *Request) {
defer func() {
if r := recover(); r != nil {
LogError("panic caught")
if err, ok := r.(error); ok {
LogError(err)
}
LogError(string(debug.Stack()))
}
}()
if err := f.ProcessFunc(request); err != nil {
LogError(err)
}
}

View File

@@ -1,22 +0,0 @@
package common
type RabbitMQObsBuildStatusProcessor struct {
c *RabbitConnection
}
func (o *RabbitMQObsBuildStatusProcessor) GenerateTopics() []string {
return []string{}
}
func (o *RabbitMQObsBuildStatusProcessor) Connection() *RabbitConnection {
if o.c == nil {
o.c = &RabbitConnection{}
}
return o.c
}
func (o *RabbitMQObsBuildStatusProcessor) ProcessRabbitMessage(msg RabbitMessage) error {
return nil
}

View File

@@ -1,62 +0,0 @@
package common
/*
* This file is part of Autogits.
*
* Copyright © 2024 SUSE LLC
*
* Autogits is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 2 of the License, or (at your option) any later
* version.
*
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Autogits. If not, see <https://www.gnu.org/licenses/>.
*/
import (
"encoding/json"
"fmt"
"io"
)
type Status struct {
}
type StatusWebhookEvent struct {
Id uint64
Context string
Description string
Sha string
State string
TargetUrl string
Commit Commit
Repository Repository
Sender *User
}
func (s *StatusWebhookEvent) GetAction() string {
return s.State
}
func (h *RequestHandler) ParseStatusRequest(data io.Reader) (*StatusWebhookEvent, error) {
action := new(StatusWebhookEvent)
err := json.NewDecoder(data).Decode(&action)
if err != nil {
return nil, fmt.Errorf("Got error while parsing: %w", err)
}
h.StdLogger.Printf("Request status for repo: %s#%s\n", action.Repository.Full_Name, action.Sha)
h.Request = &Request{
Type: RequestType_Status,
Data: action,
}
return action, nil
}

View File

@@ -1,40 +0,0 @@
package common_test
import (
"os"
"strings"
"testing"
"src.opensuse.org/autogits/common"
)
func TestStatusRequestParsing(t *testing.T) {
t.Run("parsing repo creation message", func(t *testing.T) {
var h common.RequestHandler
h.StdLogger, h.ErrLogger = common.CreateStdoutLogger(os.Stdout, os.Stdout)
json, err := h.ParseStatusRequest(strings.NewReader(requestStatusJSON))
if err != nil {
t.Fatalf("Can't parse struct: %s", err)
}
if json.GetAction() != "pending" {
t.Fatalf("json.action is '%#v'", json)
}
if json.Repository.Full_Name != "autogits/nodejs-common" ||
json.Repository.Parent == nil ||
json.Repository.Parent.Parent != nil ||
len(json.Repository.Ssh_Url) < 10 ||
json.Repository.Default_Branch != "factory" ||
json.Repository.Object_Format_Name != "sha256" {
t.Fatalf("invalid repository parse: %#v", json.Repository)
}
if json.Sha != "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e" {
t.Fatal("Invalid SHA:", json.Sha)
}
})
}

View File

@@ -1,17 +0,0 @@
package common
import (
"slices"
)
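// ExpandMaintainers replaces an occurrence of this group's name in the given
// maintainer list with the group's Reviewers and returns the result sorted
// and de-duplicated. If the group is not listed, the input is returned
// unchanged. For example, ["alice", "my_group"] with group reviewers
// ["bob", "alice"] expands to ["alice", "bob"].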
func (group *ReviewGroup) ExpandMaintainers(maintainers []string) []string {
idx := slices.Index(maintainers, group.Name)
if idx == -1 {
return maintainers
}
expandedMaintainers := slices.Replace(maintainers, idx, idx+1, group.Reviewers...)
slices.Sort(expandedMaintainers)
return slices.Compact(expandedMaintainers)
}

View File

@@ -1,62 +0,0 @@
package common_test
import (
"slices"
"testing"
"src.opensuse.org/autogits/common"
)
func TestMaintainerGroupReplacer(t *testing.T) {
GroupName := "my_group"
tests := []struct {
name string
reviewers []string
group_members []string
output []string
}{
{
name: "empty",
},
{
name: "group not maintainer",
reviewers: []string{"a", "b"},
group_members: []string{"g1", "g2"},
output: []string{"a", "b"},
},
{
name: "group maintainer",
reviewers: []string{"b", "my_group"},
group_members: []string{"g1", "g2"},
output: []string{"b", "g1", "g2"},
},
{
name: "sorted group maintainer",
reviewers: []string{"my_group", "b"},
group_members: []string{"g1", "g2"},
output: []string{"b", "g1", "g2"},
},
{
name: "group maintainer dedup",
reviewers: []string{"my_group", "g2", "b"},
group_members: []string{"g1", "g2"},
output: []string{"b", "g1", "g2"},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
g := &common.ReviewGroup{
Name: GroupName,
Reviewers: test.group_members,
}
expandedList := g.ExpandMaintainers(test.reviewers)
if slices.Compare(expandedList, test.output) != 0 {
t.Error("Expected:", test.output, "but have", expandedList)
}
})
}
}

View File

@@ -128,16 +128,6 @@ func (r *PRReviews) IsApproved() bool {
return goodReview
}
func (r *PRReviews) MissingReviews() []string {
missing := []string{}
for _, reviewer := range r.reviewers {
if !r.IsReviewedBy(reviewer) {
missing = append(missing, reviewer)
}
}
return missing
}
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
if !slices.Contains(r.reviewers, reviewer) {
return false

View File

@@ -113,10 +113,6 @@ func (s *Submodule) parseKeyValue(line string) error {
return nil
}
func (s *Submodule) ManifestSubmodulePath(manifest *Manifest) string {
return manifest.SubdirForPackage(s.Path)
}
func ParseSubmodulesFile(reader io.Reader) ([]Submodule, error) {
data, err := io.ReadAll(reader)
if err != nil {

View File

@@ -40,24 +40,10 @@ create_prjgit_sample() {
git submodule -q add ../pkgB2 pkgB2
git commit -q -m "pkgB2 added"
git checkout -b base_rm_c main
git checkout main
git clean -ffxd
git rm pkgC
git commit -q -m 'pkgC removed'
git checkout -b base_modify_c main
git submodule update --init pkgC
pushd pkgC
echo "mofieid" >> README.md
git commit -q -m "modified" README.md
popd
git commit pkgC -m "modifiedC"
git submodule deinit -f pkgC
# git checkout main
# git clean -ffxd
# git submodule -q add -f ../pkgB1 pkgB1
# git commit -q -m "main adding pkgB1"
git submodule -q add -f ../pkgB1 pkgB1
git commit -q -m "main adding pkgB1"
popd
}

View File

@@ -1,10 +1,6 @@
#!/usr/bin/bash
git init -q --bare --object-format=sha256
git config user.email test@example.com
git config user.name Test
export GIT_AUTHOR_DATE=2025-10-27T14:20:07+01:00
export GIT_COMMITTER_DATE=2025-10-27T14:20:07+01:00
# 81aba862107f1e2f5312e165453955485f424612f313d6c2fb1b31fef9f82a14
blobA=$(echo "help" | git hash-object --stdin -w)

View File

@@ -54,10 +54,6 @@ func TranslateHttpsToSshUrl(url string) (string, error) {
url2_len = len(url2)
)
if len(url) > 10 && (url[0:10] == "gitea@src." || url[0:10] == "ssh://gite") {
return url, nil
}
if len(url) > url1_len && url[0:url1_len] == url1 {
return "ssh://gitea@src.opensuse.org/" + url[url1_len:], nil
}
@@ -136,7 +132,7 @@ func PRtoString(pr *models.PullRequest) string {
return "(null)"
}
return fmt.Sprintf("%s/%s!%d", pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
return fmt.Sprintf("%s/%s#%d", pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
}
type DevelProject struct {

View File

@@ -1,8 +0,0 @@
all: ../workflow-direct/workflow-direct
cp ../workflow-direct/workflow-direct workflow-direct
podman build --pull=always -t workflow-direct workflow-direct
pr:
cp ../workflow-pr/workflow-pr workflow-pr
podman build --pull=always -t workflow-pr workflow-pr

View File

@@ -1 +0,0 @@
workflow-direct

View File

@@ -1,14 +0,0 @@
FROM registry.suse.com/bci/bci-base
RUN zypper install -y openssh-clients git-core
RUN mkdir /root/.ssh
RUN mkdir /repos
RUN ln -s /data/workflow-direct.key /root/.ssh/id_ed25519
RUN ln -s /data/workflow-direct.key.pub /root/.ssh/id_ed25519.pub
ADD known_hosts /root/.ssh/known_hosts
ADD workflow-direct /srv/workflow-direct
ENV AMQP_USERNAME=opensuse
ENV AMQP_PASSWORD=opensuse
VOLUME /data
VOLUME /repos
ENTRYPOINT /srv/workflow-direct -config /data/config.json -repo-path /repos -debug -check-on-start

View File

@@ -1,4 +0,0 @@
src.opensuse.org,195.135.223.224 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDJ8V51MVIFUkQqQOdHwC3SP9NPqp1ZWYoEbcjvZ7HhSFi2XF8ALo/h1Mk+q8kT2O75/goeTsKFbcU8zrYFeOh0=
src.opensuse.org,195.135.223.224 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCkVeXePin0haffC085V2L0jvILfwbB2Mt1fpVe21QAOcWNM+/jOC5RwtWweV/LigHImB39/KvkuPa9yLoDf+eLhdZQckSSauRfDjxtlKeFLPrfJKSA0XeVJT3kJcOvDT/3ANFhYeBbAUBTAeQt5bi2hHC1twMPbaaEdJ2jiMaIBztFf6aE9K58uoS+7Y2tTv87Mv/7lqoBW6BFMoDmjQFWgjik6ZMCvIM/7bj7AgqHk/rjmr5zKS4ag5wtHtYLm1L3LBmHdj7d0VFsOpPQexIOEnnjzKqlwmAxT6eYJ/t3qgBlT8KRfshBFgEuUZ5GJOC7TOne4PfB0bboPMZzIRo3WE9dPGRR8kAIme8XqhFbmjdJ+WsTjg0Lj+415tIbyRQoNkLtawrJxozvevs6wFEFcA/YG6o03Z577tiLT3WxOguCcD5vrALH48SyZb8jDUtcVgTWMW0to/n63S8JGUNyF7Bkw9HQWUx+GO1cv2GNzKpk22KS5dlNUVGE9E/7Ydc=
src.opensuse.org,195.135.223.224 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKNThLRPznU5Io1KrAYHmYpaoLQEMGM9nwpKyYQCkPx

View File

@@ -1 +0,0 @@
workflow-pr

View File

@@ -1,14 +0,0 @@
FROM registry.suse.com/bci/bci-base
RUN zypper install -y openssh-clients git-core
RUN mkdir /root/.ssh
RUN mkdir /repos
RUN ln -s /data/workflow-pr.key /root/.ssh/id_ed25519
RUN ln -s /data/workflow-pr.key.pub /root/.ssh/id_ed25519.pub
ADD known_hosts /root/.ssh/known_hosts
ADD workflow-pr /srv/workflow-pr
ENV AMQP_USERNAME=opensuse
ENV AMQP_PASSWORD=opensuse
VOLUME /data
VOLUME /repos
ENTRYPOINT /srv/workflow-pr -config /data/config.json -repo-path /repos -debug -check-on-start

View File

@@ -1,4 +0,0 @@
src.opensuse.org,195.135.223.224 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDJ8V51MVIFUkQqQOdHwC3SP9NPqp1ZWYoEbcjvZ7HhSFi2XF8ALo/h1Mk+q8kT2O75/goeTsKFbcU8zrYFeOh0=
src.opensuse.org,195.135.223.224 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCkVeXePin0haffC085V2L0jvILfwbB2Mt1fpVe21QAOcWNM+/jOC5RwtWweV/LigHImB39/KvkuPa9yLoDf+eLhdZQckSSauRfDjxtlKeFLPrfJKSA0XeVJT3kJcOvDT/3ANFhYeBbAUBTAeQt5bi2hHC1twMPbaaEdJ2jiMaIBztFf6aE9K58uoS+7Y2tTv87Mv/7lqoBW6BFMoDmjQFWgjik6ZMCvIM/7bj7AgqHk/rjmr5zKS4ag5wtHtYLm1L3LBmHdj7d0VFsOpPQexIOEnnjzKqlwmAxT6eYJ/t3qgBlT8KRfshBFgEuUZ5GJOC7TOne4PfB0bboPMZzIRo3WE9dPGRR8kAIme8XqhFbmjdJ+WsTjg0Lj+415tIbyRQoNkLtawrJxozvevs6wFEFcA/YG6o03Z577tiLT3WxOguCcD5vrALH48SyZb8jDUtcVgTWMW0to/n63S8JGUNyF7Bkw9HQWUx+GO1cv2GNzKpk22KS5dlNUVGE9E/7Ydc=
src.opensuse.org,195.135.223.224 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKNThLRPznU5Io1KrAYHmYpaoLQEMGM9nwpKyYQCkPx

View File

@@ -2,4 +2,3 @@ devel-importer
Factory
git
git-migrated
git-importer

View File

@@ -1,202 +0,0 @@
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Open2;
use JSON;
sub FindFactoryCommit {
my ($package) = @_;
# Execute osc cat and capture output
my $osc_cmd = "osc cat openSUSE:Factory $package $package.changes";
open( my $osc_fh, "$osc_cmd |" ) or die "Failed to run osc: $!";
my $data = do { local $/; <$osc_fh> };
close($osc_fh);
# Calculate size
my $size = length($data);
# Create blob header
my $blob = "blob $size\0$data";
# Open a pipe to openssl to compute the hash
my ( $reader, $writer );
my $pid = open2( $reader, $writer, "openssl sha256" );
# Send blob data
print $writer $blob;
close $writer;
# Read the hash result and extract it
my $hash_line = <$reader>;
waitpid( $pid, 0 );
my ($hash) = $hash_line =~ /([a-fA-F0-9]{64})/;
# Run git search command with the hash
print("looking for hash: $hash\n");
my @hashes;
my $git_cmd =
"git -C $package rev-list --all pool/HEAD | while read commit; do git -C $package ls-tree \"\$commit\" | grep -q '^100644 blob $hash' && echo \"\$commit\"; done";
open( my $git_fh, "$git_cmd |" ) or die "Failed to run git search: $!";
while ( my $commit = <$git_fh> ) {
chomp $commit;
print "Found commit $commit\n";
push( @hashes, $commit );
}
close($git_fh);
return @hashes;
}
sub ListPackages {
my ($project) = @_;
open( my $osc_fh,
"curl -s https://src.opensuse.org/openSUSE/Factory/raw/branch/main/pkgs/_meta/devel_packages | awk '{ if ( \$2 == \"$project\" ) print \$1 }' |" )
or die "Failed to run curl: $!";
my @packages = <$osc_fh>;
chomp @packages;
close($osc_fh);
return @packages;
}
# Read project from first argument
sub Usage {
die "Usage: $0 <OBS Project> [org [package]]";
}
my $project = shift or Usage();
my $org = shift;
if (not defined($org)) {
$org = `osc meta prj $project | grep scmsync | sed -e 's,^.*src.opensuse.org/\\(.*\\)/_ObsPrj.*,\\1,'`;
chomp($org);
}
my @packages = ListPackages($project);
my $pkg = shift;
@packages = ($pkg) if defined $pkg;
my @tomove;
my @toremove;
if ( ! -e $org ) {
mkdir($org);
}
chdir($org);
print "Verify packages in /pool for $org package in $project\n";
my $super_user = $ENV{SUPER};
if (defined($super_user)) {
$super_user = "-G $super_user";
} else {
$super_user = "";
}
my @missing;
# verify that packages in devel project is a fork from pool.
for my $pkg ( sort(@packages) ) {
my $data = `git obs api /repos/$org/$pkg 2> /dev/null`;
if ( length($data) == 0 ) {
print "***** Repo missing in $org: $pkg\n";
push(@missing, $pkg);
next;
}
else {
my $repo = decode_json($data);
if ( !$repo->{parent}
|| $repo->{parent}->{owner}->{username} ne "pool" )
{
if ( system("git obs api /repos/pool/$pkg > /dev/null 2> /dev/null") == 0 ) {
print "=== $pkg NOT A FORK of exiting package\n";
push( @toremove, $pkg );
}
else {
print "$pkg NEEDS transfer\n";
push( @tomove, $pkg );
}
}
}
}
if ( scalar @missing > 0 ) {
for my $pkg (@missing) {
my $index = 0;
$index++ until $packages[$index] eq $pkg;
splice(@packages, $index, 1);
}
}
if ( scalar @toremove > 0 ) {
print "ABORTING. Need repos removed.\n";
print "@toremove\n";
exit(1);
}
if ( scalar @tomove > 0 ) {
for my $pkg (@tomove) {
system("git obs $super_user api -X POST --data '{\"reparent\": true, \"organization\": \"pool\"}' /repos/$org/$pkg/forks") == 0 and
system("git clone gitea\@src.opensuse.org:pool/$pkg") == 0 and
system("git -C $pkg checkout -B factory HEAD") == 0 and
system("git -C $pkg push origin factory") == 0 and
system("git obs $super_user api -X PATCH --data '{\"default_branch\": \"factory\"}' /repos/pool/$pkg") == 0
or die "Error in creating a pool repo";
system("for i in \$(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes/origin/ | grep -v '\\(^HEAD\$\\|^factory\$\)'); do git -C $pkg push origin :\$i; done") == 0 or die "failed to cull branches";
}
}
print "Verify complete.\n";
for my $package ( sort(@packages) ) {
print " ----- PROCESSING $package\n";
my $url = "https://src.opensuse.org/$org/$package.git";
my $push_url = "gitea\@src.opensuse.org:pool/$package.git";
if ( not -e $package ) {
print("cloning...\n");
system("git clone --origin pool $url") == 0
or die "Can't clone $org/$package";
}
else {
print("adding remote...\n");
system("git -C $package remote rm pool > /dev/null");
system("git -C $package remote add pool $url") == 0
or die "Can't add pool for $package";
}
system("git -C $package remote set-url pool --push $push_url") == 0
or die "Can't add push remote for $package";
print("fetching remote...\n");
system("git -C $package fetch pool") == 0
or ( push( @tomove, $package ) and die "Can't fetch pool for $package" );
my @commits = FindFactoryCommit($package);
my $c;
my $match = 0;
for my $commit (@commits) {
if ( length($commit) != 64 ) {
print("Failed to find factory commit. Aborting.");
exit(1);
}
if (
system("git -C $package lfs fetch pool $commit") == 0
and system("git -C $package checkout -B factory $commit") == 0
and system("git -C $package lfs checkout") == 0
and system(
"cd $package; osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | md5sum -c --quiet"
) == 0
and system("bash -c \"diff <(ls -1 $package | sort) <(osc ls openSUSE:Factory $package | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | sort)\"") == 0
)
{
$c = $commit;
$match = 1;
last;
}
}
if ( !$match ) {
die "Match not found. Aborting.";
}
system ("git -C $package push -f pool factory");
print "$package: $c\n";
}

File diff suppressed because it is too large Load Diff

View File

@@ -6,12 +6,10 @@ devel:languages:erlang:Factory
devel:languages:hare
devel:languages:javascript
devel:languages:lua
devel:languages:perl
devel:openSUSE:Factory
network:dhcp
network:im:whatsapp
network:messaging:xmpp
server:dns
systemsmanagement:cockpit
systemsmanagement:wbem
X11:lxde

View File

@@ -298,22 +298,6 @@ func parseRequestJSONOrg(reqType string, data []byte) (org *common.Organization,
org = pr.Repository.Owner
extraAction = ""
case common.RequestType_Status:
status := common.StatusWebhookEvent{}
if err = json.Unmarshal(data, &status); err != nil {
return
}
switch status.State {
case "pending", "success", "error", "failure":
break
default:
err = fmt.Errorf("Unknown Status' state: %s", status.State)
return
}
org = status.Repository.Owner
extraAction = status.State
case common.RequestType_Wiki:
wiki := common.WikiWebhookEvent{}
if err = json.Unmarshal(data, &wiki); err != nil {

View File

@@ -1,46 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"io"
"os"
"github.com/tailscale/hujson"
)
type Config struct {
ForgeEndpoint string `json:"forge_url"`
Keys []string `json:"keys"`
}
type contextKey string
const configKey contextKey = "config"
func ReadConfig(reader io.Reader) (*Config, error) {
data, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("error reading config data: %w", err)
}
config := Config{}
data, err = hujson.Standardize(data)
if err != nil {
return nil, fmt.Errorf("failed to parse json: %w", err)
}
if err := json.Unmarshal(data, &config); err != nil {
return nil, fmt.Errorf("error parsing json to api keys and target url: %w", err)
}
return &config, nil
}
func ReadConfigFile(filename string) (*Config, error) {
file, err := os.Open(filename)
if err != nil {
return nil, fmt.Errorf("cannot open config file for reading. err: %w", err)
}
defer file.Close()
return ReadConfig(file)
}

View File

@@ -1,15 +0,0 @@
package main
import (
"context"
"net/http"
)
func ConfigMiddleWare(cfg *Config) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), configKey, cfg)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
}

View File

@@ -1,169 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io"
"net/http"
"os"
"slices"
"strings"
"src.opensuse.org/autogits/common"
)
type Status struct {
Context string `json:"context"`
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
type StatusInput struct {
State string `json:"state"`
TargetUrl string `json:"target_url"`
}
func main() {
configFile := flag.String("config", "", "status proxy config file")
flag.Parse()
if *configFile == "" {
common.LogError("missing required argument config")
return
}
config, err := ReadConfigFile(*configFile)
if err != nil {
common.LogError("Failed to read config file", err)
return
}
mux := http.NewServeMux()
mux.Handle("/repos/{owner}/{repo}/statuses/{sha}", ConfigMiddleWare(config)(http.HandlerFunc(StatusProxy)))
common.LogInfo("server up and listening on :3000")
err = http.ListenAndServe(":3000", mux)
if err != nil {
common.LogError("Server failed to start up", err)
}
}
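// StatusProxy validates the caller's bearer token against the configured
// keys, wraps the incoming state/target_url into a commit status with the
// fixed context "Build in obs", and forwards it to
// {forge_url}/repos/{owner}/{repo}/statuses/{sha}, authenticated with the
// GITEA_TOKEN environment variable.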
func StatusProxy(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodPost {
config, ok := r.Context().Value(configKey).(*Config)
if !ok {
common.LogError("Config missing from context")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
header := r.Header.Get("Authorization")
if header == "" {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token_arr := strings.Split(header, " ")
if len(token_arr) != 2 {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
if !strings.EqualFold(token_arr[0], "Bearer") {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
token := token_arr[1]
if !slices.Contains(config.Keys, token) {
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
return
}
owner := r.PathValue("owner")
repo := r.PathValue("repo")
sha := r.PathValue("sha")
if !ok {
common.LogError("Failed to get config from context, is it set?")
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
}
posturl := fmt.Sprintf("%s/repos/%s/%s/statuses/%s", config.ForgeEndpoint, owner, repo, sha)
decoder := json.NewDecoder(r.Body)
var statusinput StatusInput
err := decoder.Decode(&statusinput)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
status := Status{
Context: "Build in obs",
State: statusinput.State,
TargetUrl: statusinput.TargetUrl,
}
status_payload, err := json.Marshal(status)
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
client := &http.Client{}
req, err := http.NewRequest("POST", posturl, bytes.NewBuffer(status_payload))
if err != nil {
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
return
}
ForgeToken := os.Getenv("GITEA_TOKEN")
if ForgeToken == "" {
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
common.LogError("GITEA_TOKEN was not set, all requests will fail")
return
}
req.Header.Add("Content-Type", "Content-Type")
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
resp, err := client.Do(req)
if err != nil {
common.LogError(fmt.Sprintf("Request to forge endpoint failed: %v", err))
http.Error(w, http.StatusText(http.StatusBadGateway), http.StatusBadGateway)
return
}
defer resp.Body.Close()
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(resp.StatusCode)
/*
the commented out section sets every key
value from the headers, unsure if this
leaks information from gitea
for k, v := range resp.Header {
for _, vv := range v {
w.Header().Add(k, vv)
}
}
*/
_, err = io.Copy(w, resp.Body)
if err != nil {
common.LogError("Error copying response body: %v", err)
}
} else {
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
return
}
}

5
go.mod
View File

@@ -10,16 +10,12 @@ require (
github.com/go-openapi/validate v0.24.0
github.com/opentracing/opentracing-go v1.2.0
github.com/rabbitmq/amqp091-go v1.10.0
github.com/redis/go-redis/v9 v9.11.0
github.com/tailscale/hujson v0.0.0-20250226034555-ec1d1c113d33
go.uber.org/mock v0.5.0
gopkg.in/yaml.v3 v3.0.1
)
require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
@@ -37,4 +33,5 @@ require (
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
golang.org/x/sync v0.7.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

10
go.sum
View File

@@ -1,16 +1,8 @@
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -58,8 +50,6 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=

View File

@@ -15,23 +15,6 @@ Target Usage
Projects where policy reviews are required.
Configuration
-------------
Groups are defined in the workflow.config inside the project git. They take the following options:
{
...
ReviewGroups: [
{
"Name": "name of the group user",
"Reviewers": ["members", "of", "group"],
"Silent": (true, false) -- if true, do not explicitly require review requests of group members
},
],
...
}
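Group members act on a review request by commenting on the pull request: a comment of the form "@<group name>: approve" (also "approved" or "LGTM") accepts on behalf of the group, while "@<group name>: decline" followed by lines justifying the decision requests changes; any other text after the mention is treated as requesting changes. Reviews made through the standard review interface are not counted for the group, and editing a comment does not change the recorded review state.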
Requirements
------------
* Gitea token to:

View File

@@ -11,7 +11,6 @@ import (
"strconv"
"strings"
"time"
"unicode"
"src.opensuse.org/autogits/common"
"src.opensuse.org/autogits/common/gitea-generated/models"
@@ -22,10 +21,9 @@ var acceptRx *regexp.Regexp
var rejectRx *regexp.Regexp
var groupName string
func InitRegex(newGroupName string) {
groupName = newGroupName
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
rejectRx = regexp.MustCompile("^:\\s*")
func InitRegex(groupName string) {
acceptRx = regexp.MustCompile("\\s*:\\s*LGTM")
rejectRx = regexp.MustCompile("\\s*:\\s*")
}
func ParseReviewLine(reviewText string) (bool, string) {
@@ -36,18 +34,7 @@ func ParseReviewLine(reviewText string) (bool, string) {
return false, line
}
l := line[glen:]
for idx, r := range l {
if unicode.IsSpace(r) {
continue
} else if r == ':' {
return true, l[idx:]
} else {
return false, line
}
}
return false, line
return true, line[glen:]
}
func ReviewAccepted(reviewText string) bool {
@@ -125,16 +112,6 @@ func FindAcceptableReviewInTimeline(user string, timeline []*models.TimelineComm
return nil
}
func FindOurLastReviewInTimeline(timeline []*models.TimelineComment) *models.TimelineComment {
for _, t := range timeline {
if t.Type == common.TimelineCommentType_Review && t.User.UserName == groupName && t.Created == t.Updated {
return t
}
}
return nil
}
func UnrequestReviews(gitea common.Gitea, org, repo string, id int64, users []string) {
if err := gitea.UnrequestReview(org, repo, id, users...); err != nil {
common.LogError("Can't remove reviewrs after a review:", err)
@@ -167,29 +144,13 @@ func ProcessNotifications(notification *models.NotificationThread, gitea common.
repo := match[2]
id, _ := strconv.ParseInt(match[3], 10, 64)
common.LogInfo("processing:", fmt.Sprintf("%s/%s!%d", org, repo, id))
common.LogInfo("processing:", fmt.Sprintf("%s/%s#%d", org, repo, id))
pr, err := gitea.GetPullRequest(org, repo, id)
if err != nil {
common.LogError(" ** Cannot fetch PR associated with review:", subject.URL, "Error:", err)
return
}
if err := ProcessPR(pr); err == nil && !common.IsDryRun {
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
} else if err != nil && err != ReviewNotFinished {
common.LogError(err)
}
}
var ReviewNotFinished = fmt.Errorf("Review is not finished")
func ProcessPR(pr *models.PullRequest) error {
org := pr.Base.Repo.Owner.UserName
repo := pr.Base.Repo.Name
id := pr.Index
found := false
for _, reviewer := range pr.RequestedReviewers {
if reviewer != nil && reviewer.UserName == groupName {
@@ -199,36 +160,45 @@ func ProcessPR(pr *models.PullRequest) error {
}
if !found {
common.LogInfo(" review is not requested for", groupName)
return nil
if !common.IsDryRun {
gitea.SetNotificationRead(notification.ID)
}
return
}
config := configs.GetPrjGitConfig(org, repo, pr.Base.Name)
if config == nil {
return fmt.Errorf("Cannot find config for: %s", pr.URL)
common.LogError("Cannot find config for:", fmt.Sprintf("%s/%s#%s", org, repo, pr.Base.Name))
return
}
if pr.State == "closed" {
// dismiss the review
common.LogInfo(" -- closed request, so nothing to review")
return nil
if !common.IsDryRun {
gitea.SetNotificationRead(notification.ID)
}
return
}
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
if err != nil {
return fmt.Errorf("Failed to fetch reviews for: %v: %w", pr.URL, err)
common.LogInfo(" ** No reviews associated with request:", subject.URL, "Error:", err)
return
}
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(gitea, groupName, pr.Head.Sha, org, repo, id)
if err != nil {
return fmt.Errorf("Failed to fetch timeline to review. %w", err)
common.LogError(err)
return
}
groupConfig, err := config.GetReviewGroup(groupName)
requestReviewers, err := config.GetReviewGroupMembers(groupName)
if err != nil {
return fmt.Errorf("Failed to fetch review group. %w", err)
common.LogError(err)
return
}
// submitter cannot be reviewer
requestReviewers := slices.Clone(groupConfig.Reviewers)
requestReviewers = slices.DeleteFunc(requestReviewers, func(u string) bool { return u == pr.User.UserName })
// pr.Head.Sha
@@ -236,38 +206,34 @@ func ProcessPR(pr *models.PullRequest) error {
if review := FindAcceptableReviewInTimeline(reviewer, timeline, reviews); review != nil {
if ReviewAccepted(review.Body) {
if !common.IsDryRun {
text := reviewer + " approved a review on behalf of " + groupName
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, text)
if err != nil {
common.LogError(" -> failed to write approval comment", err)
gitea.AddReviewComment(pr, common.ReviewStateApproved, "Signed off by: "+reviewer)
UnrequestReviews(gitea, org, repo, id, requestReviewers)
if !common.IsDryRun {
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
UnrequestReviews(gitea, org, repo, id, requestReviewers)
}
}
common.LogInfo(" -> approved by", reviewer)
common.LogInfo(" review at", review.Created)
return nil
return
} else if ReviewRejected(review.Body) {
if !common.IsDryRun {
text := reviewer + " requested changes on behalf of " + groupName + ". See " + review.HTMLURL
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Changes requested. See review by: "+reviewer)
if err != nil {
common.LogError(" -> failed to write rejecting comment", err)
}
UnrequestReviews(gitea, org, repo, id, requestReviewers)
gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Changes requested. See review by: "+reviewer)
UnrequestReviews(gitea, org, repo, id, requestReviewers)
if err := gitea.SetNotificationRead(notification.ID); err != nil {
common.LogDebug(" Cannot set notification as read", err)
}
}
common.LogInfo(" -> declined by", reviewer)
return nil
return
}
}
}
// request group member reviews, if missing
common.LogDebug(" Review incomplete...")
if !groupConfig.Silent && len(requestReviewers) > 0 {
if len(requestReviewers) > 0 {
common.LogDebug(" Requesting reviews for:", requestReviewers)
if !common.IsDryRun {
if _, err := gitea.RequestReviews(pr, requestReviewers...); err != nil {
@@ -290,22 +256,12 @@ func ProcessPR(pr *models.PullRequest) error {
}
if !found_help_comment && !common.IsDryRun {
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
"Do **not** use standard review interface to review on behalf of the group.\n"+
"To accept the review on behalf of the group, create the following comment: `@"+groupName+": approve`.\n"+
"To request changes on behalf of the group, create the following comment: `@"+groupName+": decline` followed with lines justifying the decision.\n"+
"Future edits of the comments are ignored, a new comment is required to change the review state.")
if slices.Contains(groupConfig.Reviewers, pr.User.UserName) {
helpComment = helpComment + "\n\n" +
"Submitter is member of this review group, hence they are excluded from being one of the reviewers here"
}
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ". To review as part of this group, create a comment with contents @"+groupName+": LGTM on a separate line to accept a review. To request changes, write @"+groupName+": followed by reason for rejection. Do not use reviews to review as a group. Editing a comment invalidates that comment.")
gitea.AddComment(pr, helpComment)
}
return ReviewNotFinished
}
func PeriodReviewCheck() {
func PeriodReviewCheck(gitea common.Gitea) {
notifications, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
if err != nil {
common.LogError(" Error fetching unread notifications: %w", err)
@@ -314,15 +270,14 @@ func PeriodReviewCheck() {
for _, notification := range notifications {
ProcessNotifications(notification, gitea)
}
}
var gitea common.Gitea
func main() {
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance used for reviews")
rabbitMqHost := flag.String("rabbit-url", "amqps://rabbit.opensuse.org", "RabbitMQ instance where Gitea webhook notifications are sent")
interval := flag.Int64("interval", 10, "Notification polling interval in minutes (min 1 min)")
interval := flag.Int64("interval", 5, "Notification polling interval in minutes (min 1 min)")
configFile := flag.String("config", "", "PrjGit listing config file")
logging := flag.String("logging", "info", "Logging level: [none, error, info, debug]")
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry run, no effect. For debugging")
@@ -359,7 +314,7 @@ func main() {
return
}
gitea = common.AllocateGiteaTransport(*giteaUrl)
gitea := common.AllocateGiteaTransport(*giteaUrl)
configs, err = common.ResolveWorkflowConfigs(gitea, configData)
if err != nil {
common.LogError("Cannot parse workflow configs:", err)
@@ -403,22 +358,19 @@ func main() {
config_modified: make(chan *common.AutogitConfig),
}
process_issue_pr := IssueCommentProcessor{}
configUpdates := &common.RabbitMQGiteaEventsProcessor{
Orgs: []string{},
configUpdates := &common.ListenDefinitions{
RabbitURL: u,
Orgs: []string{},
Handlers: map[string]common.RequestProcessor{
common.RequestType_Push: &config_update,
common.RequestType_IssueComment: &process_issue_pr,
common.RequestType_Push: &config_update,
},
}
configUpdates.Connection().RabbitURL = u
for _, c := range configs {
if org, _, _ := c.GetPrjGit(); !slices.Contains(configUpdates.Orgs, org) {
configUpdates.Orgs = append(configUpdates.Orgs, org)
}
}
go common.ProcessRabbitMQEvents(configUpdates)
go configUpdates.ProcessRabbitMQEvents()
for {
config_update_loop:
@@ -446,7 +398,7 @@ func main() {
}
}
PeriodReviewCheck()
PeriodReviewCheck(gitea)
time.Sleep(time.Duration(*interval * int64(time.Minute)))
}
}

View File

@@ -2,76 +2,6 @@ package main
import "testing"
func TestReviewApprovalCheck(t *testing.T) {
tests := []struct {
Name string
GroupName string
InString string
Approved bool
Rejected bool
}{
{
Name: "Empty String",
GroupName: "group",
InString: "",
},
{
Name: "Random Text",
GroupName: "group",
InString: "some things LGTM",
},
{
Name: "Group name with Random Text means disapproval",
GroupName: "group",
InString: "@group: some things LGTM",
Rejected: true,
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "Bad name with Approval",
GroupName: "group2",
InString: "@group: LGTM",
},
{
Name: "LGTM approval",
GroupName: "group2",
InString: "@group2: LGTM",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approved",
Approved: true,
},
{
Name: "approval",
GroupName: "group2",
InString: "@group2: approve",
Approved: true,
},
{
Name: "disapproval",
GroupName: "group2",
InString: "@group2: disapprove",
Rejected: true,
},
}
func TestReviews(t *testing.T) {
for _, test := range tests {
t.Run(test.Name, func(t *testing.T) {
InitRegex(test.GroupName)
if r := ReviewAccepted(test.InString); r != test.Approved {
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
}
if r := ReviewRejected(test.InString); r != test.Rejected {
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
}
})
}
}

View File

@@ -7,25 +7,6 @@ import (
"src.opensuse.org/autogits/common"
)
type IssueCommentProcessor struct{}
func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
if req.Type != common.RequestType_IssueComment {
return fmt.Errorf("Unhandled, ignored request type: %s", req.Type)
}
data := req.Data.(*common.IssueCommentWebhookEvent)
org := data.Repository.Owner.Username
repo := data.Repository.Name
index := int64(data.Issue.Number)
pr, err := gitea.GetPullRequest(org, repo, index)
if err != nil {
return fmt.Errorf("Failed to fetch PullRequest from event: %s/%s!%d Error: %w", org, repo, index, err)
}
return ProcessPR(pr)
}
type ConfigUpdatePush struct {
config_modified chan *common.AutogitConfig
}

View File

@@ -1,7 +0,0 @@
Purpose
-------
Forwards a PR as an OBS submit request when a review is requested.
Accepts the request when the OBS request is accepted.
Rejects the request when the OBS request is denied.

View File

@@ -108,7 +108,7 @@ func ProcessNotification(notification *models.NotificationThread) {
repo := match[2]
id, _ := strconv.ParseInt(match[3], 10, 64)
common.LogInfo("processing:", fmt.Sprintf("%s/%s!%d", org, repo, id))
common.LogInfo("processing:", fmt.Sprintf("%s/%s#%d", org, repo, id))
pr, err := Gitea.GetPullRequest(org, repo, id)
if err != nil {
common.LogError(" ** Cannot fetch PR associated with review:", subject.URL, "Error:", err)

View File

@@ -16,27 +16,3 @@ Target Usage
Any project (devel, etc) that accepts PR and wants build results
Configuration File
------------------
Bot reads `staging.config` from the project git or the PR to the project git.
It is a JSON file with the following syntax
```
{
"ObsProject": "home:foo:project",
"StagingProject": "home:foo:project:staging",
"QA": [
{
"Name": "ProjectBuild",
"Origin": "home:foo:product:images"
}
]
}
```
* ObsProject: (**required**) OBS project where the base sources are built. Builds in this project are used as the reference to compare against builds of the sources from the PR
* StagingProject: project used as a template for the staging project. If omitted, the ObsProject repositories are used to create the staging. The staging project is created under the template project, or in the bot's home project if not specified.
* QA: set of projects to build on top of the binaries built in staging.
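* RebuildAll: if true, build the staging project even when no package changes are detected.
Without this option, a PR with no package changes is accepted without rebuilding by default; a rebuild can also be requested with a review comment of the form @autogits_obs_staging_bot: rebuild all (or build all).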

View File

@@ -47,18 +47,15 @@ const (
Username = "autogits_obs_staging_bot"
)
var GiteaToken string
var runId uint
func FetchPrGit(git common.Git, pr *models.PullRequest) error {
// clone PR head via base (target) repo
cloneURL := pr.Base.Repo.CloneURL
// pass our token as user always
user, err := url.Parse(cloneURL)
common.PanicOnError(err)
user.User = url.User(common.GetGiteaToken())
cloneURL = user.String()
// clone PR head and base and return path
cloneURL := pr.Head.Repo.CloneURL
if GiteaUseSshClone {
cloneURL = pr.Head.Repo.SSHURL
}
if _, err := os.Stat(path.Join(git.GetPath(), pr.Head.Sha)); os.IsNotExist(err) {
common.PanicOnError(git.GitExec("", "clone", "--depth", "1", cloneURL, pr.Head.Sha))
common.PanicOnError(git.GitExec(pr.Head.Sha, "fetch", "--depth", "1", "origin", pr.Head.Sha, pr.MergeBase))
@@ -123,7 +120,7 @@ func ProcessBuildStatus(project, refProject *common.BuildResultList) BuildStatus
// the repositories should be setup equally between the projects. We
// need to verify that packages that are building in `refProject` are not
// failing in the `project`
BuildResultSorter := func(a, b *common.BuildResult) int {
BuildResultSorter := func(a, b common.BuildResult) int {
if c := strings.Compare(a.Repository, b.Repository); c != 0 {
return c
}
@@ -139,7 +136,7 @@ func ProcessBuildStatus(project, refProject *common.BuildResultList) BuildStatus
common.LogInfo("New package. Only need some success...")
SomeSuccess := false
for i := 0; i < len(project.Result); i++ {
repoRes := project.Result[i]
repoRes := &project.Result[i]
repoResStatus, ok := common.ObsRepoStatusDetails[repoRes.Code]
if !ok {
common.LogDebug("cannot find code:", repoRes.Code)
@@ -208,8 +205,8 @@ func ProcessBuildStatus(project, refProject *common.BuildResultList) BuildStatus
return BuildStatusSummaryFailed
}
func ProcessRepoBuildStatus(results, ref []*common.PackageBuildStatus) (status BuildStatusSummary, SomeSuccess bool) {
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
func ProcessRepoBuildStatus(results, ref []common.PackageBuildStatus) (status BuildStatusSummary, SomeSuccess bool) {
PackageBuildStatusSorter := func(a, b common.PackageBuildStatus) int {
return strings.Compare(a.Package, b.Package)
}
@@ -266,7 +263,7 @@ func ProcessRepoBuildStatus(results, ref []*common.PackageBuildStatus) (status B
return BuildStatusSummarySuccess, SomeSuccess
}
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string) (*common.ProjectMeta, error) {
common.LogDebug("repo content fetching ...")
err := FetchPrGit(git, pr)
if err != nil {
@@ -292,32 +289,7 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
}
// find modified directories and assume they are packages
// TODO: use _manifest for this here
headDirectories, err := git.GitDirectoryList(dir, pr.Head.Sha)
if err != nil {
return nil, err
}
baseDirectories, err := git.GitDirectoryList(dir, pr.MergeBase)
if err != nil {
return nil, err
}
for pkg, headOid := range headDirectories {
if baseOid, exists := baseDirectories[pkg]; !exists || baseOid != headOid {
modifiedOrNew = append(modifiedOrNew, pkg)
}
}
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
if err == nil {
// success, so we use that staging master project as our build project
buildPrj = stagingMasterPrj
} else {
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
meta, err = ObsClient.GetProjectMeta(buildPrj)
}
meta, err := ObsClient.GetProjectMeta(buildPrj)
if err != nil {
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
return nil, err
@@ -342,13 +314,13 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
urlPkg = append(urlPkg, "onlybuild="+url.QueryEscape(pkg))
}
meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
if len(meta.ScmSync) >= 65535 {
return nil, errors.New("Reached max amount of package changes per request")
}
meta.Title = fmt.Sprintf("PR#%d to %s", pr.Index, pr.Base.Name)
// QE wants it published ... also we should not hardcode it here, since
// it is configurable via the :PullRequest project
// meta.PublicFlags = common.Flags{Contents: "<disable/>"}
// Untouched content are flags and involved users. These can be configured
// via the staging project.
meta.Groups = nil
meta.Persons = nil
// set paths to parent project
for idx, r := range meta.Repositories {
@@ -442,8 +414,7 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
var state RequestModification = RequestModificationSourceChanged
if meta == nil {
// new build
common.LogDebug(" Staging master:", config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject)
if err != nil {
return RequestModificationNoChange, err
}
@@ -457,8 +428,6 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
} else {
err = ObsClient.SetProjectMeta(meta)
if err != nil {
x, _ := xml.MarshalIndent(meta, "", " ")
common.LogDebug(" meta:", string(x))
common.LogError("cannot create meta project:", err)
return RequestModificationNoChange, err
}
@@ -615,7 +584,7 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
}
if !pr.HasMerged && time.Since(time.Time(pr.Closed)) < time.Duration(config.CleanupDelay)*time.Hour {
common.LogInfo("Cooldown period for cleanup of", thread.Subject.HTMLURL)
common.LogInfo("Cooldown period for cleanup of", thread.URL)
return false
}
@@ -653,14 +622,6 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
return false // cleaned up now, but the cleanup was not already done
}
func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.CommitStatus) error {
_, err := gitea.SetCommitStatus(org, repo, hash, status)
if err != nil {
common.LogError(err)
}
return err
}
func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
dir, err := os.MkdirTemp(os.TempDir(), BotName)
common.PanicOnError(err)
@@ -723,7 +684,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
stagingConfig, err := common.ParseStagingConfig(data)
if err != nil {
common.LogError("Error parsing config file", common.StagingConfigFile, err)
return true, err
}
if stagingConfig.ObsProject == "" {
@@ -736,7 +696,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
if err != nil || meta == nil {
if err != nil {
common.LogError("Cannot find reference project meta:", stagingConfig.ObsProject, err)
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot fetch reference project meta")
@@ -797,28 +757,23 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
common.LogDebug(" # head submodules:", len(headSubmodules))
common.LogDebug(" # base submodules:", len(baseSubmodules))
modifiedPackages := make([]string, 0, 16)
newPackages := make([]string, 0, 16)
modifiedOrNew := make([]string, 0, 16)
if !stagingConfig.RebuildAll {
for pkg, headOid := range headSubmodules {
if baseOid, exists := baseSubmodules[pkg]; !exists || baseOid != headOid {
if exists {
modifiedPackages = append(modifiedPackages, pkg)
} else {
newPackages = append(newPackages, pkg)
}
modifiedOrNew = append(modifiedOrNew, pkg)
common.LogDebug(pkg, ":", baseOid, "->", headOid)
}
}
}
if len(modifiedPackages) == 0 && len(newPackages) == 0 {
if len(modifiedOrNew) == 0 {
rebuild_all := false || stagingConfig.RebuildAll
reviews, err := gitea.GetPullRequestReviews(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index)
common.LogDebug("num reviews:", len(reviews))
if err == nil {
rebuild_rx := regexp.MustCompile("^@autogits_obs_staging_bot\\s*:?\\s*(re)?build\\s*all$")
rebuild_rx := regexp.MustCompile("^@autogits_obs_staging_bot\\s*:\\s*(re)?build\\s*all$")
done:
for _, r := range reviews {
for _, l := range common.SplitLines(r.Body) {
@@ -850,12 +805,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
if !rebuild_all {
common.LogInfo("No package changes detected. Ignoring")
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, "No package changes, not rebuilding project by default, accepting change")
if err != nil {
common.LogError(err)
} else {
return true, nil
}
_, err = gitea.AddReviewComment(pr, common.ReviewStateComment, "No package changes. Not rebuilding project by default")
}
return true, err
}
@@ -871,22 +821,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
TargetURL: ObsWebHost + "/project/show/" + stagingProject,
}
if err != nil {
msg := "Unable to setup stage project " + stagingConfig.ObsProject
status.Status = common.CommitStatus_Fail
common.LogError(msg)
if !IsDryRun {
SetStatus(gitea, org, repo, pr.Head.Sha, status)
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, msg)
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
return false, nil
}
msg := "Changed source updated for build"
if change == RequestModificationProjectCreated {
msg = "Build is started in " + ObsWebHost + "/project/show/" +
@@ -895,7 +829,8 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
if len(stagingConfig.QA) > 0 {
msg = msg + "\nAdditional QA builds: \n"
}
SetStatus(gitea, org, repo, pr.Head.Sha, status)
gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
for _, setup := range stagingConfig.QA {
CreateQASubProject(stagingConfig, git, gitea, pr,
stagingProject,
@@ -909,44 +844,42 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
gitea.AddComment(pr, msg)
}
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedPackages...)
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedOrNew...)
if err != nil {
common.LogError("failed fetching ref project status for", stagingConfig.ObsProject, ":", err)
}
stagingResult, err := ObsClient.BuildStatus(stagingProject)
if err != nil {
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
common.LogError("failed fetching ref project status for", stagingProject, ":", err)
}
buildStatus := ProcessBuildStatus(stagingResult, baseResult)
done := false
switch buildStatus {
case BuildStatusSummarySuccess:
status.Status = common.CommitStatus_Success
done = true
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, "Build successful")
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
case BuildStatusSummaryFailed:
status.Status = common.CommitStatus_Fail
done = true
if !IsDryRun {
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Build failed")
if err != nil {
common.LogError(err)
} else {
return true, nil
}
}
}
common.LogInfo("Build status:", buildStatus)
if !IsDryRun {
if err = SetStatus(gitea, org, repo, pr.Head.Sha, status); err != nil {
return false, err
}
}
return done, nil
gitea.SetCommitStatus(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Head.Sha, status)
// waiting for build results -- nothing to do
} else if err == NonActionableReviewError || err == NoReviewsFoundError {
return true, nil
@@ -1005,6 +938,7 @@ func PollWorkNotifications(giteaUrl string) {
var ListPullNotificationsOnly bool
var GiteaUrl string
var GiteaUseSshClone bool
var ObsWebHost string
var IsDryRun bool
var ProcessPROnly string
@@ -1028,6 +962,7 @@ func main() {
ProcessPROnly := flag.String("pr", "", "Process only specific PR and ignore the rest. Use for debugging")
buildRoot := flag.String("build-root", "", "Default build location for staging projects. Default is bot's home project")
flag.StringVar(&GiteaUrl, "gitea-url", "https://src.opensuse.org", "Gitea instance")
flag.BoolVar(&GiteaUseSshClone, "use-ssh-clone", false, "enforce cloning via ssh")
obsApiHost := flag.String("obs", "https://api.opensuse.org", "API for OBS instance")
flag.StringVar(&ObsWebHost, "obs-web", "", "Web OBS instance, if not derived from the obs config")
flag.BoolVar(&IsDryRun, "dry", false, "Dry-run, don't actually create any build projects or review changes")
@@ -1061,7 +996,7 @@ func main() {
}
if len(*ProcessPROnly) > 0 {
rx := regexp.MustCompile("^([^/#]+)/([^/#]+)#([0-9]+)$")
rx := regexp.MustCompile("^(\\w+)/(\\w+)#(\\d+)$")
m := rx.FindStringSubmatch(*ProcessPROnly)
if m == nil {
common.LogError("Cannot find any PR matches in", *ProcessPROnly)

View File

@@ -1,2 +1 @@
obs-status-service
*.svg

View File

@@ -4,17 +4,9 @@ OBS Status Service
Reports the build status of OBS projects and packages as an easily produced SVG
Requests for individual build results:
/status/obs:project/package/repo/arch
where `repo` and `arch` are optional parameters.
/obs:project/package/repo/arch
Requests for project results
/status/obs:project
GET requests for / also return a 404 status normally. If the backend Redis
server is not available, it will return 500.
/obs:project
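As an illustration of the endpoints above, here is a minimal, hypothetical client sketch in Go. It assumes the `/status/...` form of the URL, the default listen address `[::1]:8080`, and placeholder project/package/repository/arch values; the service answers with an SVG badge by default, which the sketch requests explicitly via the `Accept` header.
```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Placeholder project/package/repo/arch; adjust to your deployment.
	url := "http://[::1]:8080/status/openSUSE:Factory/coreutils/standard/x86_64"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	// SVG is the default response type; request it explicitly anyway.
	req.Header.Set("Accept", "image/svg+xml")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		fmt.Fprintln(os.Stderr, "unexpected status:", resp.Status)
		os.Exit(1)
	}

	// Save the returned SVG badge so it can be embedded elsewhere.
	out, err := os.Create("status.svg")
	if err != nil {
		panic(err)
	}
	defer out.Close()
	io.Copy(out, resp.Body)
}
```
The saved badge can then be embedded in a README.md or a Gitea PR comment, matching the target usage described below.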
Areas of Responsibility
@@ -31,14 +23,3 @@ Target Usage
* README.md of package git or project git
* comment section of a Gitea PR
Running
-------
Default parameters can be overridden via environment variables:
| Environment variable             | Default                     | Description |
|----------------------------------|-----------------------------|-------------|
| `OBS_STATUS_SERVICE_OBS_URL`     | https://build.opensuse.org  | Base URL used for build log and project monitor links |
| `OBS_STATUS_SERVICE_LISTEN`      | [::1]:8080                  | Listening address and port |
| `OBS_STATUS_SERVICE_CERT`        | /run/obs-status-service.pem | Location of the certificate file for the service |
| `OBS_STATUS_SERVICE_KEY`         | /run/obs-status-service.pem | Location of the key file for the service |

View File

@@ -19,20 +19,11 @@ package main
*/
import (
"encoding/json"
"encoding/xml"
"bytes"
"flag"
"fmt"
"html"
"io"
"log"
"maps"
"net/http"
"net/url"
"os"
"slices"
"strings"
"time"
"src.opensuse.org/autogits/common"
)
@@ -42,131 +33,52 @@ const (
)
var obs *common.ObsClient
var debug bool
type RepoBuildCounters struct {
Repository, Arch string
Status string
BuildStatusCounter map[string]int
func LogDebug(v ...any) {
if debug {
log.Println(v...)
}
}
func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
if len(res) == 0 {
func ProjectStatusSummarySvg(project string) []byte {
res := GetCurrentStatus(project)
if res == nil {
return nil
}
list := common.BuildResultList{
Result: res,
}
package_names := list.GetPackageList()
pkgs := res.GetPackageList()
maxLen := 0
for _, p := range package_names {
for _, p := range pkgs {
maxLen = max(maxLen, len(p))
}
// width := float32(len(list.Result))*1.5 + float32(maxLen)*0.8
// height := 1.5*float32(maxLen) + 30
ret := NewSvg(SvgType_Project)
width := float32(len(res.Result))*1.5 + float32(maxLen)*0.8
height := 1.5*float32(maxLen) + 30
status := make([]RepoBuildCounters, len(res))
for i, repo := range res {
status[i].Arch = repo.Arch
status[i].Repository = repo.Repository
status[i].Status = repo.Code
status[i].BuildStatusCounter = make(map[string]int)
for _, pkg := range repo.Status {
status[i].BuildStatusCounter[pkg.Code]++
}
}
slices.SortFunc(status, func(a, b RepoBuildCounters) int {
if r := strings.Compare(a.Repository, b.Repository); r != 0 {
return r
}
return strings.Compare(a.Arch, b.Arch)
})
repoName := ""
ret.ypos = 3.0
for _, repo := range status {
if repo.Repository != repoName {
repoName = repo.Repository
ret.WriteTitle(repoName)
}
ret.WriteSubtitle(repo.Arch)
statuses := slices.Sorted(maps.Keys(repo.BuildStatusCounter))
for _, status := range statuses {
ret.WriteProjectStatus(res[0].Project, repo.Repository, repo.Arch, status, repo.BuildStatusCounter[status])
}
}
return ret.GenerateSvg()
ret := bytes.Buffer{}
ret.WriteString(`<svg version="2.0" width="`)
ret.WriteString(fmt.Sprint(width))
ret.WriteString(`em" height="`)
ret.WriteString(fmt.Sprint(height))
ret.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
ret.WriteString(`<defs>
<g id="f"> <!-- failed -->
<rect width="1em" height="1em" fill="#800" />
</g>
<g id="s"> <!--succeeded-->
<rect width="1em" height="1em" fill="#080" />
</g>
<g id="buidling"> <!--building-->
<rect width="1em" height="1em" fill="#880" />
</g>
</defs>`)
ret.WriteString(`<use href="#f" x="1em" y="2em"/>`)
ret.WriteString(`</svg>`)
return ret.Bytes()
}
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
if R != nil && S != nil {
switch S.Code {
case "succeeded", "failed", "building":
return "/buildlog/" + url.PathEscape(R.Project) + "/" + url.PathEscape(S.Package) + "/" + url.PathEscape(R.Repository) + "/" + url.PathEscape(R.Arch)
}
}
return ""
}
func DeleteExceptPkg(pkg string) func(*common.PackageBuildStatus) bool {
return func(item *common.PackageBuildStatus) bool {
multibuild_prefix := pkg + ":"
return item.Package != pkg && !strings.HasPrefix(item.Package, multibuild_prefix)
}
}
func PackageStatusSummarySvg(pkg string, res []*common.BuildResult) []byte {
// per repo, per arch status bins
repo_names := []string{}
package_names := []string{}
multibuild_prefix := pkg + ":"
for _, r := range res {
if pos, found := slices.BinarySearchFunc(repo_names, r.Repository, strings.Compare); !found {
repo_names = slices.Insert(repo_names, pos, r.Repository)
}
for _, p := range r.Status {
if p.Package == pkg || strings.HasPrefix(p.Package, multibuild_prefix) {
if pos, found := slices.BinarySearchFunc(package_names, p.Package, strings.Compare); !found {
package_names = slices.Insert(package_names, pos, p.Package)
}
}
}
}
ret := NewSvg(SvgType_Package)
for _, pkg = range package_names {
// if len(package_names) > 1 {
ret.WriteTitle(pkg)
// }
for _, name := range repo_names {
ret.WriteSubtitle(name)
// print all repo arches here and build results
for _, r := range res {
if r.Repository != name {
continue
}
for _, s := range r.Status {
if s.Package == pkg {
link := LinkToBuildlog(r, s)
ret.WritePackageStatus(link, r.Arch, s.Code, s.Details)
}
}
}
}
}
return ret.GenerateSvg()
}
func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus) []byte {
func PackageStatusSummarySvg(status common.PackageBuildStatus) []byte {
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
if !ok {
buildStatus = common.ObsBuildStatusDetails["error"]
@@ -183,255 +95,72 @@ func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus)
}
}
buildlog := LinkToBuildlog(repo, status)
startTag := ""
endTag := ""
log.Println(status, " -> ", buildStatus)
if len(buildlog) > 0 {
startTag = "<a href=\"" + buildlog + "\">"
endTag = "</a>"
}
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">` +
`<rect width="100%" height="100%" fill="` + fillColor + `"/>` + startTag +
`<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + html.EscapeString(buildStatus.Code) + `</text>` + endTag + `</svg>`)
return []byte(`<svg version="2.0" width="8em" height="1.5em" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="` + fillColor + `"/>
<text x="4em" y="1.1em" text-anchor="middle" fill="` + textColor + `">` + buildStatus.Code + `</text>
</svg>`)
}
func WriteJson(data any, res http.ResponseWriter) {
if jsonArray, err := json.MarshalIndent(data, "", " "); err != nil {
res.WriteHeader(500)
} else {
res.Header().Add("size", fmt.Sprint(len(jsonArray)))
res.Write(jsonArray)
}
}
func WriteXml(data any, res http.ResponseWriter) {
if xmlData, err := xml.MarshalIndent(data, "", " "); err != nil {
res.WriteHeader(500)
} else {
res.Header().Add("size", fmt.Sprint(len(xmlData)))
res.Write([]byte("<resultlist>"))
res.Write(xmlData)
res.Write([]byte("</resultlist>"))
}
}
var ObsUrl *string
func main() {
obsUrlDef := os.Getenv("OBS_STATUS_SERVICE_OBS_URL")
if len(obsUrlDef) == 0 {
obsUrlDef = "https://build.opensuse.org"
}
listenDef := os.Getenv("OBS_STATUS_SERVICE_LISTEN")
if len(listenDef) == 0 {
listenDef = "[::1]:8080"
}
certDef := os.Getenv("OBS_STATUS_SERVICE_CERT")
if len(certDef) == 0 {
certDef = "/run/obs-status-service.pem"
}
keyDef := os.Getenv("OBS_STATUS_SERVICE_KEY")
if len(keyDef) == 0 {
keyDef = certDef
}
cert := flag.String("cert-file", certDef, "TLS certificates file")
key := flag.String("key-file", keyDef, "Private key for the TLS certificate")
listen := flag.String("listen", listenDef, "Listening string")
cert := flag.String("cert-file", "", "TLS certificates file")
key := flag.String("key-file", "", "Private key for the TLS certificate")
listen := flag.String("listen", "[::1]:8080", "Listening string")
disableTls := flag.Bool("no-tls", false, "Disable TLS")
ObsUrl = flag.String("obs-url", obsUrlDef, "OBS API endpoint for package buildlog information")
debug := flag.Bool("debug", false, "Enable debug logging")
obsHost := flag.String("obs-host", "api.opensuse.org", "OBS API endpoint for package status information")
flag.BoolVar(&debug, "debug", false, "Enable debug logging")
flag.Parse()
if *debug {
common.SetLoggingLevel(common.LogLevelDebug)
common.PanicOnError(common.RequireObsSecretToken())
var err error
if obs, err = common.NewObsClient(*obsHost); err != nil {
log.Fatal(err)
}
if redisUrl := os.Getenv("REDIS"); len(redisUrl) > 0 {
RedisConnect(redisUrl)
} else {
common.LogError("REDIS needs to contains URL of the OBS Redis instance with login information")
return
}
var rescanRepoError error
go func() {
for {
if rescanRepoError = RescanRepositories(); rescanRepoError != nil {
common.LogError("Failed to rescan repositories.", rescanRepoError)
}
time.Sleep(time.Minute * 5)
}
}()
http.HandleFunc("GET /", func(res http.ResponseWriter, req *http.Request) {
if rescanRepoError != nil {
res.WriteHeader(500)
return
}
res.WriteHeader(404)
res.Write([]byte("404 page not found\n"))
http.HandleFunc("GET /{Project}", func(res http.ResponseWriter, req *http.Request) {
res.WriteHeader(http.StatusBadRequest)
})
http.HandleFunc("GET /status/{Project}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
common.LogInfo(" GET /status/"+obsPrj, "["+mime.MimeType()+"]")
http.HandleFunc("GET /{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
/*
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
status := FindAndUpdateProjectResults(obsPrj)
if len(status) == 0 {
res.WriteHeader(404)
return
}
res.Header().Add("content-type", mime.MimeHeader)
if mime.IsSvg() {
svg := ProjectStatusSummarySvg(status)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
} else if mime.IsXml() {
WriteXml(status, res)
}
status, _ := PackageBuildStatus(obsPrj, obsPkg)
svg := PackageStatusSummarySvg(status)
*/
res.Header().Add("content-type", "image/svg+xml")
//res.Header().Add("size", fmt.Sprint(len(svg)))
//res.Write(svg)
})
http.HandleFunc("GET /status/{Project}/{Package}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
common.LogInfo(" GET /status/"+obsPrj+"/"+obsPkg, "["+mime.MimeType()+"]")
status := slices.Clone(FindAndUpdateProjectResults(obsPrj))
for i, s := range status {
f := *s
f.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
status[i] = &f
}
if len(status) == 0 {
res.WriteHeader(404)
return
}
res.Header().Add("content-type", mime.MimeHeader)
if mime.IsSvg() {
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
} else if mime.IsXml() {
WriteXml(status, res)
}
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
obsPrj := req.PathValue("Project")
obsPkg := req.PathValue("Package")
repo := req.PathValue("Repository")
common.LogInfo(" GET /status/"+obsPrj+"/"+obsPkg, "["+mime.MimeType()+"]")
status := slices.Clone(FindAndUpdateRepoResults(obsPrj, repo))
for i, s := range status {
f := *s
f.Status = slices.DeleteFunc(slices.Clone(s.Status), DeleteExceptPkg(obsPkg))
status[i] = &f
}
if len(status) == 0 {
res.WriteHeader(404)
return
}
if mime.IsSvg() {
svg := PackageStatusSummarySvg(obsPkg, status)
res.Header().Add("content-type", mime.MimeHeader)
res.Header().Add("size", fmt.Sprint(len(svg)))
res.Write(svg)
} else if mime.IsJson() {
WriteJson(status, res)
} else if mime.IsXml() {
WriteXml(status, res)
}
})
http.HandleFunc("GET /status/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
mime := ParseMimeHeader(req)
http.HandleFunc("GET /{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
common.LogInfo(" GET /status/"+prj+"/"+pkg+"/"+repo+"/"+arch, "["+mime.MimeType()+"]")
res.Header().Add("content-type", mime.MimeHeader)
for _, r := range FindAndUpdateRepoResults(prj, repo) {
if r.Arch == arch {
if idx, found := slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
status := r.Status[idx]
if mime.IsSvg() {
res.Write(BuildStatusSvg(r, status))
} else if mime.IsJson() {
WriteJson(status, res)
} else if mime.IsXml() {
WriteXml(status, res)
res.Header().Add("content-type", "image/svg+xml")
prjStatus := GetCurrentStatus(prj)
if prjStatus == nil {
return
}
for _, r := range prjStatus.Result {
if r.Arch == arch && r.Repository == repo {
for _, status := range r.Status {
if status.Package == pkg {
res.Write(PackageStatusSummarySvg(status))
return
}
return
}
break
}
}
if mime.IsSvg() {
res.Write(BuildStatusSvg(nil, &common.PackageBuildStatus{Package: pkg, Code: "unknown"}))
}
})
http.HandleFunc("GET /search", func(res http.ResponseWriter, req *http.Request) {
common.LogInfo("GET /search?" + req.URL.RawQuery)
queries := req.URL.Query()
if !queries.Has("q") {
res.WriteHeader(400)
return
}
names := queries["q"]
if len(names) != 1 {
res.WriteHeader(400)
return
}
packages := FindPackages(names[0])
data, err := json.MarshalIndent(packages, "", " ")
if err != nil {
res.WriteHeader(500)
common.LogError("Error in marshalling data.", err)
return
}
res.Write(data)
res.Header().Add("content-type", "application/json")
res.WriteHeader(200)
})
http.HandleFunc("GET /buildlog/{Project}/{Package}/{Repository}/{Arch}", func(res http.ResponseWriter, req *http.Request) {
prj := req.PathValue("Project")
pkg := req.PathValue("Package")
repo := req.PathValue("Repository")
arch := req.PathValue("Arch")
res.Header().Add("location", *ObsUrl+"/package/live_build_log/"+url.PathEscape(prj)+"/"+url.PathEscape(pkg)+"/"+url.PathEscape(repo)+"/"+url.PathEscape(arch))
res.WriteHeader(307)
return
// status := GetDetailedBuildStatus(prj, pkg, repo, arch)
data, err := obs.BuildLog(prj, pkg, repo, arch)
if err != nil {
res.WriteHeader(http.StatusInternalServerError)
common.LogError("Failed to fetch build log for:", prj, pkg, repo, arch, err)
return
}
defer data.Close()
io.Copy(res, data)
})
go ProcessUpdates()
if *disableTls {
log.Fatal(http.ListenAndServe(*listen, nil))

View File

@@ -1,84 +0,0 @@
package main
import (
"os"
"testing"
"src.opensuse.org/autogits/common"
)
func TestStatusSvg(t *testing.T) {
ObsUrl = &[]string{"http://nothing.is.here"}[0]
os.WriteFile("teststatus.svg", BuildStatusSvg(nil, &common.PackageBuildStatus{
Package: "foo",
Code: "succeeded",
Details: "more success here",
}), 0o777)
data := []*common.BuildResult{
{
Project: "project:foo",
Repository: "repo1",
Arch: "x86_64",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "s390x",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "unresolveable",
},
},
},
{
Project: "project:foo",
Repository: "repo1",
Arch: "i586",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "succeeded",
},
{
Package: "pkg2",
Code: "blocked",
Details: "foo bar is why",
},
},
},
{
Project: "project:foo",
Repository: "TW",
Arch: "s390",
Status: []*common.PackageBuildStatus{
{
Package: "pkg1",
Code: "excluded",
},
{
Package: "pkg2",
Code: "failed",
},
},
},
}
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
}

View File

@@ -1,62 +0,0 @@
package main
import (
"net/http"
"strings"
)
type MimeHeader struct {
MimeHeader string
}
const (
JsonMime = "application/json"
XmlMime = "application/obs+xml"
SvgMime = "image/svg+xml"
)
var AcceptedStatusMimes []string = []string{
SvgMime,
JsonMime,
XmlMime,
}
func ParseMimeHeader(req *http.Request) *MimeHeader {
proposedMimes := req.Header.Values("Accept")
mime := MimeHeader{MimeHeader: SvgMime}
if len(proposedMimes) == 0 {
return &mime
}
for _, m := range proposedMimes {
for _, am := range AcceptedStatusMimes {
if strings.Contains(m, am) {
mime.MimeHeader = am
return &mime
}
}
}
return &mime
}
func (m *MimeHeader) IsJson() bool {
return m.MimeHeader == JsonMime
}
func (m *MimeHeader) IsXml() bool {
return m.MimeHeader == XmlMime
}
func (m *MimeHeader) IsSvg() bool {
return m.MimeHeader == SvgMime
}
func (m *MimeHeader) MimeType() string {
if m.IsJson() {
return JsonMime
} else if m.IsXml() {
return XmlMime
}
return SvgMime // default
}

View File

@@ -1,266 +0,0 @@
package main
import (
"context"
"slices"
"strings"
"sync"
"time"
"github.com/redis/go-redis/v9"
"src.opensuse.org/autogits/common"
)
var RepoStatus []*common.BuildResult = []*common.BuildResult{}
var RepoStatusLock *sync.RWMutex = &sync.RWMutex{}
var redisClient *redis.Client
func RedisConnect(RedisUrl string) {
opts, err := redis.ParseURL(RedisUrl)
if err != nil {
panic(err)
}
redisClient = redis.NewClient(opts)
}
func UpdateResults(r *common.BuildResult) {
RepoStatusLock.Lock()
defer RepoStatusLock.Unlock()
updateResultsWithoutLocking(r)
}
func updateResultsWithoutLocking(r *common.BuildResult) {
key := "result." + r.Project + "/" + r.Repository + "/" + r.Arch
data, err := redisClient.HGetAll(context.Background(), key).Result()
if err != nil {
common.LogError("Failed fetching build results for", key, err)
}
reset_time := time.Date(1000, 1, 1, 1, 1, 1, 1, time.Local)
for _, pkg := range r.Status {
pkg.LastUpdate = reset_time
}
r.LastUpdate = time.Now()
for pkg, result := range data {
if strings.HasPrefix(result, "scheduled") {
// TODO: lookup where's building
result = "building"
}
var idx int
var found bool
var code string
var details string
if pos := strings.IndexByte(result, ':'); pos > -1 && pos < len(result) {
code = result[0:pos]
details = result[pos+1:]
} else {
code = result
details = ""
}
if idx, found = slices.BinarySearchFunc(r.Status, &common.PackageBuildStatus{Package: pkg}, common.PackageBuildStatusComp); found {
res := r.Status[idx]
res.LastUpdate = r.LastUpdate
res.Code = code
res.Details = details
} else {
r.Status = slices.Insert(r.Status, idx, &common.PackageBuildStatus{
Package: pkg,
Code: code,
Details: details,
LastUpdate: r.LastUpdate,
})
}
}
for idx := 0; idx < len(r.Status); {
if r.Status[idx].LastUpdate == reset_time {
r.Status = slices.Delete(r.Status, idx, idx+1)
} else {
idx++
}
}
}
func FindProjectResults(project string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
return FindProjectResultsNoLock(project)
}
func FindProjectResultsNoLock(project string) []*common.BuildResult {
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindRepoResults(project, repo string) []*common.BuildResult {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
return FindRepoResultsNoLock(project, repo)
}
func FindRepoResultsNoLock(project, repo string) []*common.BuildResult {
ret := make([]*common.BuildResult, 0, 8)
idx, _ := slices.BinarySearchFunc(RepoStatus, &common.BuildResult{Project: project, Repository: repo}, common.BuildResultComp)
for idx < len(RepoStatus) && RepoStatus[idx].Project == project && RepoStatus[idx].Repository == repo {
ret = append(ret, RepoStatus[idx])
idx++
}
return ret
}
func FindPackages(pkg string) []string {
RepoStatusLock.RLock()
defer RepoStatusLock.RUnlock()
return FindPackagesNoLock(pkg)
}
func FindPackagesNoLock(pkg string) []string {
data := make([]string, 0, 100)
for _, repo := range RepoStatus {
for _, status := range repo.Status {
if pkg == status.Package {
entry := repo.Project + "/" + pkg
if idx, found := slices.BinarySearch(data, entry); !found {
data = slices.Insert(data, idx, entry)
if len(data) >= 100 {
return data
}
}
}
}
}
return data
}
func FindAndUpdateProjectResults(project string) []*common.BuildResult {
res := FindProjectResults(project)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 second for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func FindAndUpdateRepoResults(project, repo string) []*common.BuildResult {
res := FindRepoResults(project, repo)
wg := &sync.WaitGroup{}
now := time.Now()
for _, r := range res {
if now.Sub(r.LastUpdate).Abs() < time.Second*10 {
// 1 update per 10 second for now
continue
}
wg.Add(1)
go func() {
UpdateResults(r)
wg.Done()
}()
}
wg.Wait()
return res
}
func RescanRepositories() error {
ctx := context.Background()
var cursor uint64
var err error
common.LogDebug("** starting rescanning ...")
RepoStatusLock.Lock()
for _, repo := range RepoStatus {
repo.Dirty = false
}
RepoStatusLock.Unlock()
var count int
projectsLooked := make([]string, 0, 10000)
for {
var data []string
data, cursor, err = redisClient.ScanType(ctx, cursor, "", 1000, "hash").Result()
if err != nil {
return err
}
wg := &sync.WaitGroup{}
RepoStatusLock.Lock()
for _, repo := range data {
r := strings.Split(repo, "/")
if len(r) != 3 || len(r[0]) < 8 || r[0][0:7] != "result." {
continue
}
d := &common.BuildResult{
Project: r[0][7:],
Repository: r[1],
Arch: r[2],
}
var pos int
var found bool
if pos, found = slices.BinarySearchFunc(RepoStatus, d, common.BuildResultComp); found {
RepoStatus[pos].Dirty = true
} else {
d.Dirty = true
RepoStatus = slices.Insert(RepoStatus, pos, d)
count++
}
// fetch all keys, one per non-maintenance/non-home: projects, for package search
if idx, found := slices.BinarySearch(projectsLooked, d.Project); !found && !strings.Contains(d.Project, ":Maintenance:") && (len(d.Project) < 5 || d.Project[0:5] != "home:") {
projectsLooked = slices.Insert(projectsLooked, idx, d.Project)
wg.Add(1)
go func(r *common.BuildResult) {
updateResultsWithoutLocking(r)
wg.Done()
}(RepoStatus[pos])
}
}
wg.Wait()
RepoStatusLock.Unlock()
if cursor == 0 {
break
}
}
common.LogDebug(" added a total", count, "repos")
count = 0
RepoStatusLock.Lock()
for i := 0; i < len(RepoStatus); {
if !RepoStatus[i].Dirty {
RepoStatus = slices.Delete(RepoStatus, i, i+1)
count++
} else {
i++
}
}
RepoStatusLock.Unlock()
common.LogDebug(" removed", count, "repos")
common.LogDebug(" total repos:", len(RepoStatus))
return nil
}

View File

@@ -0,0 +1,82 @@
package main
import (
"log"
"slices"
"sync"
"time"
"src.opensuse.org/autogits/common"
)
var WatchedRepos []string
var mutex sync.Mutex
var StatusUpdateCh chan StatusUpdateMsg = make(chan StatusUpdateMsg)
var statusMutex sync.RWMutex
var CurrentStatus map[string]*common.BuildResultList = make(map[string]*common.BuildResultList)
type StatusUpdateMsg struct {
ObsProject string
Result *common.BuildResultList
}
func GetCurrentStatus(project string) *common.BuildResultList {
statusMutex.RLock()
defer statusMutex.RUnlock()
if ret, found := CurrentStatus[project]; found {
return ret
} else {
go WatchObsProject(obs, project)
return nil
}
}
func ProcessUpdates() {
for {
msg := <-StatusUpdateCh
statusMutex.Lock()
CurrentStatus[msg.ObsProject] = msg.Result
drainedChannel:
for {
select {
case msg = <-StatusUpdateCh:
CurrentStatus[msg.ObsProject] = msg.Result
default:
statusMutex.Unlock()
break drainedChannel
}
}
}
}
func WatchObsProject(obs common.ObsStatusFetcherWithState, ObsProject string) {
old_state := ""
mutex.Lock()
if pos, found := slices.BinarySearch(WatchedRepos, ObsProject); found {
mutex.Unlock()
return
} else {
WatchedRepos = slices.Insert(WatchedRepos, pos, ObsProject)
mutex.Unlock()
}
LogDebug("+ watching", ObsProject)
opts := common.BuildResultOptions{}
for {
state, err := obs.BuildStatusWithState(ObsProject, &opts)
if err != nil {
log.Println(" *** Error fetching build for", ObsProject, err)
time.Sleep(time.Minute)
} else {
opts.OldState = state.State
LogDebug(" --> update", ObsProject, " => ", old_state)
StatusUpdateCh <- StatusUpdateMsg{ObsProject: ObsProject, Result: state}
}
}
}

View File

@@ -0,0 +1,34 @@
package main
import (
"testing"
"go.uber.org/mock/gomock"
"src.opensuse.org/autogits/common"
mock_common "src.opensuse.org/autogits/common/mock"
)
func TestWatchObsProject(t *testing.T) {
tests := []struct {
name string
res common.BuildResultList
}{
{
name: "two requests",
res: common.BuildResultList{
State: "success",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctl := gomock.NewController(t)
obs := mock_common.NewMockObsStatusFetcherWithState(ctl)
obs.EXPECT().BuildStatusWithState("test:foo", "").Return(&test.res, nil)
WatchObsProject(obs, "test:foo")
})
}
}

View File

@@ -1,149 +0,0 @@
package main
import (
"bytes"
"fmt"
"html"
"net/url"
"slices"
)
type SvgWriter struct {
ypos float64
header []byte
out bytes.Buffer
}
const (
SvgType_Package = iota
SvgType_Project
)
func NewSvg(SvgType int) *SvgWriter {
svg := &SvgWriter{}
svg.header = []byte(`<svg version="2.0" overflow="auto" width="40ex" height="`)
svg.out.WriteString(`em" xmlns="http://www.w3.org/2000/svg">`)
switch SvgType {
case SvgType_Package:
svg.out.WriteString(`<defs>
<g id="s">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="green" fill="#efe" rx="5" />
<text x="2.5ex" y="1.1em">succeeded</text>
</g>
<g id="f">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="red" fill="#fee" rx="5" />
<text x="5ex" y="1.1em">failed</text>
</g>
<g id="b">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fbf" rx="5" />
<text x="3.75ex" y="1.1em">blocked</text>
</g>
<g id="broken">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="#fff" rx="5" />
<text x="4.5ex" y="1.1em" stroke="red" fill="red">broken</text>
</g>
<g id="build">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#664" rx="5" />
<text x="3.75ex" y="1.1em" fill="yellow">building</text>
</g>
<g id="u">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="yellow" fill="#555" rx="5" />
<text x="2ex" y="1.1em" fill="orange">unresolvable</text>
</g>
<g id="scheduled">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="blue" fill="none" rx="5" />
<text x="3ex" y="1.1em" stroke="none" fill="blue">scheduled</text>
</g>
<g id="d">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">disabled</text>
</g>
<g id="e">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="#aaf">excluded</text>
</g>
<g id="un">
<rect width="15ex" height="1.5em" stroke-width="1" stroke="grey" fill="none" rx="5" />
<text x="4ex" y="1.1em" stroke="none" fill="grey">unknown</text>
</g>
<rect id="repotitle" width="100%" height="2em" stroke-width="1" stroke="grey" fill="grey" rx="2" />
</defs>`)
case SvgType_Project:
svg.out.WriteString(`<defs>
</defs>`)
}
return svg
}
func (svg *SvgWriter) WriteTitle(title string) {
svg.out.WriteString(`<text stroke="black" fill="black" x="1ex" y="` + fmt.Sprint(svg.ypos-.5) + `em">` + html.EscapeString(title) + "</text>")
svg.ypos += 2.5
}
func (svg *SvgWriter) WriteSubtitle(subtitle string) {
svg.out.WriteString(`<use href="#repotitle" y="` + fmt.Sprint(svg.ypos-2) + `em"/>`)
svg.out.WriteString(`<text stroke="black" fill="black" x="3ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + html.EscapeString(subtitle) + `</text>`)
svg.ypos += 2
}
func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
StatusToSVG := func(S string) string {
switch S {
case "succeeded":
return "s"
case "failed":
return "f"
case "broken", "scheduled":
return S
case "blocked":
return "b"
case "building":
return "build"
case "unresolvable":
return "u"
case "disabled":
return "d"
case "excluded":
return "e"
}
return "un"
}
svg.out.WriteString(`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-.6) + `em">` + html.EscapeString(arch) + `</text>`)
svg.out.WriteString(`<g>`)
if len(loglink) > 0 {
u, err := url.Parse(loglink)
if err == nil {
svg.out.WriteString(`<a href="` + u.String() + `" target="_blank" rel="noopener">`)
}
}
svg.out.WriteString(`<use href="#` + StatusToSVG(status) + `" x="20ex" y="` + fmt.Sprint(svg.ypos-1.7) + `em"/>`)
if len(loglink) > 0 {
svg.out.WriteString(`</a>`)
}
if len(detail) > 0 {
svg.out.WriteString(`<title>` + html.EscapeString(detail) + "</title>")
}
svg.out.WriteString("</g>\n")
svg.ypos += 2
}
func (svg *SvgWriter) WriteProjectStatus(project, repo, arch, status string, count int) {
u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&amp;" + url.QueryEscape(status) + "=1&amp;arch_" + url.QueryEscape(arch) + "=1&amp;repo_" + url.QueryEscape(repo) + "=1")
if err != nil {
return
}
svg.out.WriteString(`<g><a href="` + u.String() + `" target="_blank" rel="noopener">` + "\n" +
`<text fill="#113" x="5ex" y="` + fmt.Sprint(svg.ypos-0.6) + "em\">\n" +
html.EscapeString(status+": ") + fmt.Sprint(count) + "</text></a></g>\n")
svg.ypos += 2
}
func (svg *SvgWriter) GenerateSvg() []byte {
return slices.Concat(svg.header, []byte(fmt.Sprint(svg.ypos)), svg.out.Bytes(), []byte("</svg>"))
}

View File

@@ -1,24 +0,0 @@
Reparent Bot
============
Enables putting new parents of repositories, as special forks, into
pool and other projects.
Areas of Responsibility
-------------------------
* monitor issues for Add packages
+ issue creator *must be* owner of the repo, OR
+ repository must not be a fork
* assign organization Owner to review request
* reparent the repository and create a PR
* remove non-accepted repositories from /pool, if no other
branches are relevant here
Target Usage
------------
* devel and released products

View File

@@ -5,7 +5,7 @@ After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/gitea-events-rabbitmq-publisher
EnvironmentFile=-/etc/default/gitea-events-rabbitmq-publisher.env
EnvironmentFile=-/etc/sysconfig/gitea-events-rabbitmq-publisher.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict

View File

@@ -1,16 +0,0 @@
[Unit]
Description=Staging bot for project git PRs in OBS
After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/obs-staging-bot
EnvironmentFile=-/etc/default/obs-staging-bot.env
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict
[Install]
WantedBy=multi-user.target

View File

@@ -1,15 +0,0 @@
[Unit]
Description=OBS build status as SVG service
After=network-online.target
[Service]
Type=exec
Restart=on-failure
ExecStart=/usr/bin/obs-status-service
EnvironmentFile=-/etc/default/obs-status-service.env
DynamicUser=yes
ProtectSystem=strict
[Install]
WantedBy=multi-user.target

View File

@@ -1,4 +0,0 @@
package main
// exists only to import this for go.modules
import "go.uber.org/mock/mockgen/model"

View File

@@ -1,9 +0,0 @@
Purpose
-------
Automatically resolve git conflicts in .gitmodules when a merge produces a
conflict.
It uses HEAD and MERGE_HEAD to calculate the merge base and passes it to the
conflict resolution.

View File

@@ -1,42 +0,0 @@
package main
import (
"os"
"strings"
"src.opensuse.org/autogits/common"
)
func main() {
cwd, err := os.Getwd()
if err != nil {
common.LogError(err)
return
}
gh, err := common.AllocateGitWorkTree(cwd, "", "")
if err != nil {
common.LogError(err)
return
}
git, err := gh.ReadExistingPath("")
if err != nil {
common.LogError(err)
return
}
MergeBase := strings.TrimSpace(git.GitExecWithOutputOrPanic("", "merge-base", "HEAD", "MERGE_HEAD"))
status, err := git.GitStatus("")
if err != nil {
common.LogError(err)
return
}
for _, s := range status {
if s.Path == ".gitmodules" && s.Status == common.GitStatus_Unmerged {
if err := git.GitResolveSubmoduleFileConflict(s, "", MergeBase, "HEAD", "MERGE_HEAD"); err != nil {
common.LogError(err)
}
}
}
}

View File

@@ -1,27 +0,0 @@
package main
import (
"io"
"log"
"os"
"github.com/tailscale/hujson"
)
func main() {
data, err := io.ReadAll(os.Stdin)
if err != nil {
log.Println(err)
os.Exit(1)
}
json, err := hujson.Standardize(data)
if err != nil {
log.Println(err)
os.Exit(2)
}
_, err = os.Stdout.Write(json)
if err != nil {
log.Print(err)
os.Exit(3)
}
}

BIN
vendor.tar.zst (Stored with Git LFS) Normal file

Binary file not shown.

View File

@@ -1,15 +0,0 @@
bin/
.idea/
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out

View File

@@ -1,12 +0,0 @@
language: go
dist: xenial
go:
- '1.10'
- '1.11'
- '1.12'
- '1.13'
- 'tip'
script:
- go test -coverpkg=./... -coverprofile=coverage.info -timeout=5s
- bash <(curl -s https://codecov.io/bash)

View File

@@ -1,43 +0,0 @@
# Contributor Code of Conduct
This project adheres to [The Code Manifesto](http://codemanifesto.com)
as its guidelines for contributor interactions.
## The Code Manifesto
We want to work in an ecosystem that empowers developers to reach their
potential — one that encourages growth and effective collaboration. A space
that is safe for all.
A space such as this benefits everyone that participates in it. It encourages
new developers to enter our field. It is through discussion and collaboration
that we grow, and through growth that we improve.
In the effort to create such a place, we hold to these values:
1. **Discrimination limits us.** This includes discrimination on the basis of
race, gender, sexual orientation, gender identity, age, nationality,
technology and any other arbitrary exclusion of a group of people.
2. **Boundaries honor us.** Your comfort levels are not everyones comfort
levels. Remember that, and if brought to your attention, heed it.
3. **We are our biggest assets.** None of us were born masters of our trade.
Each of us has been helped along the way. Return that favor, when and where
you can.
4. **We are resources for the future.** As an extension of #3, share what you
know. Make yourself a resource to help those that come after you.
5. **Respect defines us.** Treat others as you wish to be treated. Make your
discussions, criticisms and debates from a position of respectfulness. Ask
yourself, is it true? Is it necessary? Is it constructive? Anything less is
unacceptable.
6. **Reactions require grace.** Angry responses are valid, but abusive language
and vindictive actions are toxic. When something happens that offends you,
handle it assertively, but be respectful. Escalate reasonably, and try to
allow the offender an opportunity to explain themselves, and possibly
correct the issue.
7. **Opinions are just that: opinions.** Each and every one of us, due to our
background and upbringing, have varying opinions. That is perfectly
acceptable. Remember this: if you respect your own opinions, you should
respect the opinions of others.
8. **To err is human.** You might not intend it, but mistakes do happen and
contribute to build experience. Tolerate honest mistakes, and don't
hesitate to apologize if you make one yourself.

View File

@@ -1,63 +0,0 @@
#### Support
If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
#### What to contribute
If you don't know what to do, there are some features and functions that need to be done
- [ ] Refactor code
- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
- [ ] Create actual list of contributors and projects that currently using this package
- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
- [ ] Implement fuzzing testing
- [ ] Implement some struct/map/array utilities
- [ ] Implement map/array validation
- [ ] Implement benchmarking
- [ ] Implement batch of examples
- [ ] Look at forks for new features and fixes
#### Advice
Feel free to create what you want, but keep in mind when you implement new features:
- Code must be clear and readable, names of variables/constants clearly describes what they are doing
- Public functions must be documented and described in source file and added to README.md to the list of available functions
- There are must be unit-tests for any new functions and improvements
## Financial contributions
We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator).
Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed.
## Credits
### Contributors
Thank you to all the people who have already contributed to govalidator!
<a href="https://github.com/asaskevich/govalidator/graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
### Backers
Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)]
<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
### Sponsors
Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor))
<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>

View File

@@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014-2020 Alex Saskevich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,622 +0,0 @@
govalidator
===========
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator)
[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator)
[![Coverage](https://codecov.io/gh/asaskevich/govalidator/branch/master/graph/badge.svg)](https://codecov.io/gh/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [![Backers on Open Collective](https://opencollective.com/govalidator/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/govalidator/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield)
A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).
#### Installation
Make sure that Go is installed on your computer.
Type the following command in your terminal:
go get github.com/asaskevich/govalidator
or you can get specified release of the package with `gopkg.in`:
go get gopkg.in/asaskevich/govalidator.v10
After it the package is ready to use.
#### Import package in your project
Add following line in your `*.go` file:
```go
import "github.com/asaskevich/govalidator"
```
If you are unhappy to use long `govalidator`, you can do something like this:
```go
import (
valid "github.com/asaskevich/govalidator"
)
```
#### Activate behavior to require all fields have a validation tag by default
`SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function.
`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to determine between `nil` and `zero value` state can use this. If disabled, both `nil` and `zero` values cause validation errors.
```go
import "github.com/asaskevich/govalidator"
func init() {
govalidator.SetFieldsRequiredByDefault(true)
}
```
Here's some code to explain it:
```go
// this struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
type exampleStruct struct {
Name string ``
Email string `valid:"email"`
}
// this, however, will only fail when Email is empty or an invalid email address:
type exampleStruct2 struct {
Name string `valid:"-"`
Email string `valid:"email"`
}
// lastly, this will only fail when Email is an invalid email address but not when it's empty:
type exampleStruct2 struct {
Name string `valid:"-"`
Email string `valid:"email,optional"`
}
```
#### Recent breaking changes (see [#123](https://github.com/asaskevich/govalidator/pull/123))
##### Custom validator function signature
A context was added as the second parameter, for structs this is the object being validated this makes dependent validation possible.
```go
import "github.com/asaskevich/govalidator"
// old signature
func(i interface{}) bool
// new signature
func(i interface{}, o interface{}) bool
```
##### Adding a custom validator
This was changed to prevent data races when accessing custom validators.
```go
import "github.com/asaskevich/govalidator"
// before
govalidator.CustomTypeTagMap["customByteArrayValidator"] = func(i interface{}, o interface{}) bool {
// ...
}
// after
govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, o interface{}) bool {
// ...
})
```
#### List of functions:
```go
func Abs(value float64) float64
func BlackList(str, chars string) string
func ByteLength(str string, params ...string) bool
func CamelCaseToUnderscore(str string) string
func Contains(str, substring string) bool
func Count(array []interface{}, iterator ConditionIterator) int
func Each(array []interface{}, iterator Iterator)
func ErrorByField(e error, field string) string
func ErrorsByField(e error) map[string]string
func Filter(array []interface{}, iterator ConditionIterator) []interface{}
func Find(array []interface{}, iterator ConditionIterator) interface{}
func GetLine(s string, index int) (string, error)
func GetLines(s string) []string
func HasLowerCase(str string) bool
func HasUpperCase(str string) bool
func HasWhitespace(str string) bool
func HasWhitespaceOnly(str string) bool
func InRange(value interface{}, left interface{}, right interface{}) bool
func InRangeFloat32(value, left, right float32) bool
func InRangeFloat64(value, left, right float64) bool
func InRangeInt(value, left, right interface{}) bool
func IsASCII(str string) bool
func IsAlpha(str string) bool
func IsAlphanumeric(str string) bool
func IsBase64(str string) bool
func IsByteLength(str string, min, max int) bool
func IsCIDR(str string) bool
func IsCRC32(str string) bool
func IsCRC32b(str string) bool
func IsCreditCard(str string) bool
func IsDNSName(str string) bool
func IsDataURI(str string) bool
func IsDialString(str string) bool
func IsDivisibleBy(str, num string) bool
func IsEmail(str string) bool
func IsExistingEmail(email string) bool
func IsFilePath(str string) (bool, int)
func IsFloat(str string) bool
func IsFullWidth(str string) bool
func IsHalfWidth(str string) bool
func IsHash(str string, algorithm string) bool
func IsHexadecimal(str string) bool
func IsHexcolor(str string) bool
func IsHost(str string) bool
func IsIP(str string) bool
func IsIPv4(str string) bool
func IsIPv6(str string) bool
func IsISBN(str string, version int) bool
func IsISBN10(str string) bool
func IsISBN13(str string) bool
func IsISO3166Alpha2(str string) bool
func IsISO3166Alpha3(str string) bool
func IsISO4217(str string) bool
func IsISO693Alpha2(str string) bool
func IsISO693Alpha3b(str string) bool
func IsIn(str string, params ...string) bool
func IsInRaw(str string, params ...string) bool
func IsInt(str string) bool
func IsJSON(str string) bool
func IsLatitude(str string) bool
func IsLongitude(str string) bool
func IsLowerCase(str string) bool
func IsMAC(str string) bool
func IsMD4(str string) bool
func IsMD5(str string) bool
func IsMagnetURI(str string) bool
func IsMongoID(str string) bool
func IsMultibyte(str string) bool
func IsNatural(value float64) bool
func IsNegative(value float64) bool
func IsNonNegative(value float64) bool
func IsNonPositive(value float64) bool
func IsNotNull(str string) bool
func IsNull(str string) bool
func IsNumeric(str string) bool
func IsPort(str string) bool
func IsPositive(value float64) bool
func IsPrintableASCII(str string) bool
func IsRFC3339(str string) bool
func IsRFC3339WithoutZone(str string) bool
func IsRGBcolor(str string) bool
func IsRegex(str string) bool
func IsRequestURI(rawurl string) bool
func IsRequestURL(rawurl string) bool
func IsRipeMD128(str string) bool
func IsRipeMD160(str string) bool
func IsRsaPub(str string, params ...string) bool
func IsRsaPublicKey(str string, keylen int) bool
func IsSHA1(str string) bool
func IsSHA256(str string) bool
func IsSHA384(str string) bool
func IsSHA512(str string) bool
func IsSSN(str string) bool
func IsSemver(str string) bool
func IsTiger128(str string) bool
func IsTiger160(str string) bool
func IsTiger192(str string) bool
func IsTime(str string, format string) bool
func IsType(v interface{}, params ...string) bool
func IsURL(str string) bool
func IsUTFDigit(str string) bool
func IsUTFLetter(str string) bool
func IsUTFLetterNumeric(str string) bool
func IsUTFNumeric(str string) bool
func IsUUID(str string) bool
func IsUUIDv3(str string) bool
func IsUUIDv4(str string) bool
func IsUUIDv5(str string) bool
func IsULID(str string) bool
func IsUnixTime(str string) bool
func IsUpperCase(str string) bool
func IsVariableWidth(str string) bool
func IsWhole(value float64) bool
func LeftTrim(str, chars string) string
func Map(array []interface{}, iterator ResultIterator) []interface{}
func Matches(str, pattern string) bool
func MaxStringLength(str string, params ...string) bool
func MinStringLength(str string, params ...string) bool
func NormalizeEmail(str string) (string, error)
func PadBoth(str string, padStr string, padLen int) string
func PadLeft(str string, padStr string, padLen int) string
func PadRight(str string, padStr string, padLen int) string
func PrependPathToErrors(err error, path string) error
func Range(str string, params ...string) bool
func RemoveTags(s string) string
func ReplacePattern(str, pattern, replace string) string
func Reverse(s string) string
func RightTrim(str, chars string) string
func RuneLength(str string, params ...string) bool
func SafeFileName(str string) string
func SetFieldsRequiredByDefault(value bool)
func SetNilPtrAllowedByRequired(value bool)
func Sign(value float64) float64
func StringLength(str string, params ...string) bool
func StringMatches(s string, params ...string) bool
func StripLow(str string, keepNewLines bool) string
func ToBoolean(str string) (bool, error)
func ToFloat(str string) (float64, error)
func ToInt(value interface{}) (res int64, err error)
func ToJSON(obj interface{}) (string, error)
func ToString(obj interface{}) string
func Trim(str, chars string) string
func Truncate(str string, length int, ending string) string
func TruncatingErrorf(str string, args ...interface{}) error
func UnderscoreToCamelCase(s string) string
func ValidateMap(inputMap map[string]interface{}, validationMap map[string]interface{}) (bool, error)
func ValidateStruct(s interface{}) (bool, error)
func WhiteList(str, chars string) string
type ConditionIterator
type CustomTypeValidator
type Error
func (e Error) Error() string
type Errors
func (es Errors) Error() string
func (es Errors) Errors() []error
type ISO3166Entry
type ISO693Entry
type InterfaceParamValidator
type Iterator
type ParamValidator
type ResultIterator
type UnsupportedTypeError
func (e *UnsupportedTypeError) Error() string
type Validator
```
#### Examples
###### IsURL
```go
println(govalidator.IsURL(`http://user@pass:domain.com/path/page`))
```
###### IsType
```go
println(govalidator.IsType("Bob", "string"))
println(govalidator.IsType(1, "int"))
i := 1
println(govalidator.IsType(&i, "*int"))
```
IsType can be used through the tag `type` which is essential for map validation:
```go
type User struct {
Name string `valid:"type(string)"`
Age int `valid:"type(int)"`
Meta interface{} `valid:"type(string)"`
}
result, err := govalidator.ValidateStruct(User{"Bob", 20, "meta"})
if err != nil {
println("error: " + err.Error())
}
println(result)
```
###### ToString
```go
type User struct {
FirstName string
LastName string
}
str := govalidator.ToString(&User{"John", "Juan"})
println(str)
```
###### Each, Map, Filter, Count for slices
Each iterates over the slice/array and calls Iterator for every item
```go
data := []interface{}{1, 2, 3, 4, 5}
var fn govalidator.Iterator = func(value interface{}, index int) {
println(value.(int))
}
govalidator.Each(data, fn)
```
```go
data := []interface{}{1, 2, 3, 4, 5}
var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} {
return value.(int) * 3
}
_ = govalidator.Map(data, fn) // result = []interface{}{1, 6, 9, 12, 15}
```
```go
data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
var fn govalidator.ConditionIterator = func(value interface{}, index int) bool {
return value.(int)%2 == 0
}
_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
_ = govalidator.Count(data, fn) // result = 5
```
###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2)
If you want to validate structs, you can use tag `valid` for any field in your structure. All validators used with this field in one tag are separated by comma. If you want to skip validation, place `-` in your tag. If you need a validator that is not on the list below, you can add it like this:
```go
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
return str == "duck"
})
```
For completely custom validators (interface-based), see below.
Here is a list of available validators for struct fields (validator - used function):
```go
"email": IsEmail,
"url": IsURL,
"dialstring": IsDialString,
"requrl": IsRequestURL,
"requri": IsRequestURI,
"alpha": IsAlpha,
"utfletter": IsUTFLetter,
"alphanum": IsAlphanumeric,
"utfletternum": IsUTFLetterNumeric,
"numeric": IsNumeric,
"utfnumeric": IsUTFNumeric,
"utfdigit": IsUTFDigit,
"hexadecimal": IsHexadecimal,
"hexcolor": IsHexcolor,
"rgbcolor": IsRGBcolor,
"lowercase": IsLowerCase,
"uppercase": IsUpperCase,
"int": IsInt,
"float": IsFloat,
"null": IsNull,
"uuid": IsUUID,
"uuidv3": IsUUIDv3,
"uuidv4": IsUUIDv4,
"uuidv5": IsUUIDv5,
"creditcard": IsCreditCard,
"isbn10": IsISBN10,
"isbn13": IsISBN13,
"json": IsJSON,
"multibyte": IsMultibyte,
"ascii": IsASCII,
"printableascii": IsPrintableASCII,
"fullwidth": IsFullWidth,
"halfwidth": IsHalfWidth,
"variablewidth": IsVariableWidth,
"base64": IsBase64,
"datauri": IsDataURI,
"ip": IsIP,
"port": IsPort,
"ipv4": IsIPv4,
"ipv6": IsIPv6,
"dns": IsDNSName,
"host": IsHost,
"mac": IsMAC,
"latitude": IsLatitude,
"longitude": IsLongitude,
"ssn": IsSSN,
"semver": IsSemver,
"rfc3339": IsRFC3339,
"rfc3339WithoutZone": IsRFC3339WithoutZone,
"ISO3166Alpha2": IsISO3166Alpha2,
"ISO3166Alpha3": IsISO3166Alpha3,
"ulid": IsULID,
```
Validators with parameters
```go
"range(min|max)": Range,
"length(min|max)": ByteLength,
"runelength(min|max)": RuneLength,
"stringlength(min|max)": StringLength,
"matches(pattern)": StringMatches,
"in(string1|string2|...|stringN)": IsIn,
"rsapub(keylength)" : IsRsaPub,
"minstringlength(int): MinStringLength,
"maxstringlength(int): MaxStringLength,
```
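For illustration, here is a minimal sketch of how these parameterized validators combine in `valid` tags; the `SignupForm` struct and its field values are hypothetical, not taken from the package:
```go
type SignupForm struct {
	Username string `valid:"alphanum,stringlength(3|20)"`
	Bio      string `valid:"runelength(0|280)"`
	Role     string `valid:"in(admin|editor|viewer)"`
	Age      string `valid:"numeric,range(18|120)"`
}

result, err := govalidator.ValidateStruct(SignupForm{
	Username: "bob42",
	Bio:      "Hello!",
	Role:     "editor",
	Age:      "30",
})
if err != nil {
	println("error: " + err.Error())
}
println(result) // true: every field satisfies its validators
```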
Validators with parameters for any type
```go
"type(type)": IsType,
```
And here is a small example of usage:
```go
type Post struct {
Title string `valid:"alphanum,required"`
Message string `valid:"duck,ascii"`
Message2 string `valid:"animal(dog)"`
AuthorIP string `valid:"ipv4"`
Date string `valid:"-"`
}
post := &Post{
Title: "My Example Post",
Message: "duck",
Message2: "dog",
AuthorIP: "123.234.54.3",
}
// Add your own struct validation tags
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
return str == "duck"
})
// Add your own struct validation tags with parameter
govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool {
species := params[0]
return str == species
})
govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$")
result, err := govalidator.ValidateStruct(post)
if err != nil {
println("error: " + err.Error())
}
println(result)
```
###### ValidateMap [#338](https://github.com/asaskevich/govalidator/pull/338)
If you want to validate maps, pass both the map to be validated and a validation map that contains the same tags used in ValidateStruct; both maps must be of type `map[string]interface{}`.
Here is a small example of usage:
```go
var mapTemplate = map[string]interface{}{
"name":"required,alpha",
"family":"required,alpha",
"email":"required,email",
"cell-phone":"numeric",
"address":map[string]interface{}{
"line1":"required,alphanum",
"line2":"alphanum",
"postal-code":"numeric",
},
}
var inputMap = map[string]interface{}{
"name":"Bob",
"family":"Smith",
"email":"foo@bar.baz",
"address":map[string]interface{}{
"line1":"",
"line2":"",
"postal-code":"",
},
}
result, err := govalidator.ValidateMap(inputMap, mapTemplate)
if err != nil {
println("error: " + err.Error())
}
println(result)
```
###### WhiteList
```go
// Remove all characters from the string except those between "a" and "z"
println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
```
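Assuming your version of the package also ships the complementary `BlackList` sanitizer (which removes the characters matching the pattern instead of keeping them), the inverse operation would look roughly like this:
```go
// Remove all characters between "0" and "9" from the string
println(govalidator.BlackList("a3a43a5a4a3a2a23a4a5a4a3a4", "0-9") == "aaaaaaaaaaaa") // expected: true
```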
###### Custom validation functions
Custom validation using your own domain-specific validators is also available - here's an example of how to use it:
```go
import "github.com/asaskevich/govalidator"
type CustomByteArray [6]byte // custom types are supported and can be validated
type StructWithCustomByteArray struct {
ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence
Email string `valid:"email"`
CustomMinLength int `valid:"-"`
}
govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, context interface{}) bool {
switch v := context.(type) { // you can type switch on the context interface being validated
case StructWithCustomByteArray:
// you can check and validate against some other field in the context,
// return early or not validate against the context at all your choice
case SomeOtherType:
// ...
default:
// expecting some other type? Throw/panic here or continue
}
switch v := i.(type) { // type switch on the struct field being validated
case CustomByteArray:
for _, e := range v { // this validator checks that the byte array is not empty, i.e. not all zeroes
if e != 0 {
return true
}
}
}
return false
})
govalidator.CustomTypeTagMap.Set("customMinLengthValidator", func(i interface{}, context interface{}) bool {
switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation
case StructWithCustomByteArray:
return len(v.ID) >= v.CustomMinLength
}
return false
})
```
###### Loop over Error()
By default .Error() returns all errors in a single string. To access each error you can do this:
```go
if err != nil {
errs := err.(govalidator.Errors).Errors()
for _, e := range errs {
fmt.Println(e.Error())
}
}
```
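A slightly fuller sketch, assuming the `post` value from the ValidateStruct example above, that also inspects the structured `govalidator.Error` values (the `Error` type exposes `Name`, `Validator` and `Err`):
```go
if _, err := govalidator.ValidateStruct(post); err != nil {
	for _, e := range err.(govalidator.Errors).Errors() {
		if fieldErr, ok := e.(govalidator.Error); ok {
			// Name is the field that failed, Validator the tag that rejected it
			fmt.Printf("%s failed %q: %s\n", fieldErr.Name, fieldErr.Validator, fieldErr.Err)
		} else {
			fmt.Println(e.Error())
		}
	}
}
```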
###### Custom error messages
Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it:
```go
type Ticket struct {
Id int64 `json:"id"`
FirstName string `json:"firstname" valid:"required~First name is blank"`
}
```
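A quick usage sketch, assuming the `Ticket` type above: validating a ticket with a blank first name should surface the custom message rather than the default `required` error:
```go
_, err := govalidator.ValidateStruct(Ticket{Id: 1})
if err != nil {
	println(err.Error()) // expected output: "First name is blank"
}
```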
#### Notes
Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator).
Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator).
#### Support
If you have a contribution to the package, feel free to create a Pull Request or an Issue.
#### What to contribute
If you don't know where to start, here are some features and tasks that still need work:
- [ ] Refactor code
- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
- [ ] Create an up-to-date list of contributors and projects that are currently using this package
- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
- [ ] Keep the [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) up to date
- [ ] Update the [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that are available for `ValidateStruct` and add new ones
- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
- [ ] Implement fuzzing testing
- [ ] Implement some struct/map/array utilities
- [ ] Implement map/array validation
- [ ] Implement benchmarking
- [ ] Implement batch of examples
- [ ] Look at forks for new features and fixes
#### Advice
Feel free to create what you want, but keep in mind when you implement new features:
- Code must be clear and readable, and the names of variables/constants must clearly describe what they do
- Public functions must be documented in the source file and added to the list of available functions in README.md
- There must be unit tests for any new functions and improvements
## Credits
### Contributors
This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors)
* [Daniel Lohse](https://github.com/annismckenzie)
* [Attila Oláh](https://github.com/attilaolah)
* [Daniel Korner](https://github.com/Dadie)
* [Steven Wilkin](https://github.com/stevenwilkin)
* [Deiwin Sarjas](https://github.com/deiwin)
* [Noah Shibley](https://github.com/slugmobile)
* [Nathan Davies](https://github.com/nathj07)
* [Matt Sanford](https://github.com/mzsanford)
* [Simon ccl1115](https://github.com/ccl1115)
<a href="https://github.com/asaskevich/govalidator/graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
### Backers
Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)]
<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
### Sponsors
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)]
<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>
## License
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large)

View File

@@ -1,87 +0,0 @@
package govalidator
// Iterator is the function that accepts an element of a slice/array and its index
type Iterator func(interface{}, int)
// ResultIterator is the function that accepts an element of a slice/array and its index and returns any result
type ResultIterator func(interface{}, int) interface{}
// ConditionIterator is the function that accepts an element of a slice/array and its index and returns a boolean
type ConditionIterator func(interface{}, int) bool
// ReduceIterator is the function that accepts two elements of a slice/array and returns the result of merging those values
type ReduceIterator func(interface{}, interface{}) interface{}
// Some checks whether any item of the array satisfies the ConditionIterator. Returns a boolean.
func Some(array []interface{}, iterator ConditionIterator) bool {
res := false
for index, data := range array {
res = res || iterator(data, index)
}
return res
}
// Every checks whether every item of the array satisfies the ConditionIterator. Returns a boolean.
func Every(array []interface{}, iterator ConditionIterator) bool {
res := true
for index, data := range array {
res = res && iterator(data, index)
}
return res
}
// Reduce boils down a list of values into a single value by ReduceIterator
func Reduce(array []interface{}, iterator ReduceIterator, initialValue interface{}) interface{} {
for _, data := range array {
initialValue = iterator(initialValue, data)
}
return initialValue
}
// Each iterates over the slice and applies Iterator to every item
func Each(array []interface{}, iterator Iterator) {
for index, data := range array {
iterator(data, index)
}
}
// Map iterates over the slice and applies ResultIterator to every item. Returns a new slice as a result.
func Map(array []interface{}, iterator ResultIterator) []interface{} {
var result = make([]interface{}, len(array))
for index, data := range array {
result[index] = iterator(data, index)
}
return result
}
// Find iterates over the slice and applies ConditionIterator to every item. Returns the first item that meets the ConditionIterator, or nil otherwise.
func Find(array []interface{}, iterator ConditionIterator) interface{} {
for index, data := range array {
if iterator(data, index) {
return data
}
}
return nil
}
// Filter iterates over the slice and applies ConditionIterator to every item. Returns a new slice.
func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
var result = make([]interface{}, 0)
for index, data := range array {
if iterator(data, index) {
result = append(result, data)
}
}
return result
}
// Count iterates over the slice and applies ConditionIterator to every item. Returns the count of items that meet the ConditionIterator.
func Count(array []interface{}, iterator ConditionIterator) int {
count := 0
for index, data := range array {
if iterator(data, index) {
count = count + 1
}
}
return count
}
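// Illustrative usage sketch (not part of the original arrays.go; added here
// only to show how the helpers above fit together):
func exampleArrayHelpersSketch() {
	data := []interface{}{1, 2, 3, 4, 5}
	isEven := func(value interface{}, index int) bool { return value.(int)%2 == 0 }
	_ = Some(data, isEven)  // true: at least one element is even
	_ = Every(data, isEven) // false: not every element is even
	_ = Find(data, isEven)  // 2: the first even element
	_ = Reduce(data, func(acc, value interface{}) interface{} {
		return acc.(int) + value.(int)
	}, 0) // 15: the sum of all elements
}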

View File

@@ -1,81 +0,0 @@
package govalidator
import (
"encoding/json"
"fmt"
"reflect"
"strconv"
)
// ToString converts the input to a string.
func ToString(obj interface{}) string {
res := fmt.Sprintf("%v", obj)
return res
}
// ToJSON converts the input to a valid JSON string
func ToJSON(obj interface{}) (string, error) {
res, err := json.Marshal(obj)
if err != nil {
res = []byte("")
}
return string(res), err
}
// ToFloat converts the input to a float64, or 0.0 if the input cannot be converted.
func ToFloat(value interface{}) (res float64, err error) {
val := reflect.ValueOf(value)
switch value.(type) {
case int, int8, int16, int32, int64:
res = float64(val.Int())
case uint, uint8, uint16, uint32, uint64:
res = float64(val.Uint())
case float32, float64:
res = val.Float()
case string:
res, err = strconv.ParseFloat(val.String(), 64)
if err != nil {
res = 0
}
default:
err = fmt.Errorf("ToInt: unknown interface type %T", value)
res = 0
}
return
}
// ToInt converts the input string or any numeric type to an int64, or 0 if the input is not an integer.
func ToInt(value interface{}) (res int64, err error) {
val := reflect.ValueOf(value)
switch value.(type) {
case int, int8, int16, int32, int64:
res = val.Int()
case uint, uint8, uint16, uint32, uint64:
res = int64(val.Uint())
case float32, float64:
res = int64(val.Float())
case string:
if IsInt(val.String()) {
res, err = strconv.ParseInt(val.String(), 0, 64)
if err != nil {
res = 0
}
} else {
err = fmt.Errorf("ToInt: invalid numeric format %g", value)
res = 0
}
default:
err = fmt.Errorf("ToInt: unknown interface type %T", value)
res = 0
}
return
}
// ToBoolean converts the input string to a boolean.
func ToBoolean(str string) (bool, error) {
return strconv.ParseBool(str)
}
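// Illustrative usage sketch (not part of the original converter.go):
func exampleConverterSketch() {
	s := ToString(42)                      // "42"
	j, _ := ToJSON(map[string]int{"a": 1}) // `{"a":1}`
	f, _ := ToFloat("3.14")                // 3.14
	i, _ := ToInt("42")                    // 42
	b, _ := ToBoolean("true")              // true
	_, _, _, _, _ = s, j, f, i, b
}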

View File

@@ -1,3 +0,0 @@
package govalidator
// A package of validators and sanitizers for strings, structures and collections.

View File

@@ -1,47 +0,0 @@
package govalidator
import (
"sort"
"strings"
)
// Errors is an array of multiple errors and conforms to the error interface.
type Errors []error
// Errors returns itself.
func (es Errors) Errors() []error {
return es
}
func (es Errors) Error() string {
var errs []string
for _, e := range es {
errs = append(errs, e.Error())
}
sort.Strings(errs)
return strings.Join(errs, ";")
}
// Error encapsulates a name, an error and whether there's a custom error message or not.
type Error struct {
Name string
Err error
CustomErrorMessageExists bool
// Validator indicates the name of the validator that failed
Validator string
Path []string
}
func (e Error) Error() string {
if e.CustomErrorMessageExists {
return e.Err.Error()
}
errName := e.Name
if len(e.Path) > 0 {
errName = strings.Join(append(e.Path, e.Name), ".")
}
return errName + ": " + e.Err.Error()
}
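// Illustrative sketch (not part of the original error.go; assumes "errors" is
// imported): how Error composes its message from Path and Name when no custom
// message is set.
func exampleErrorSketch() string {
	e := Error{
		Name:      "Age",
		Err:       errors.New("must be numeric"),
		Validator: "numeric",
		Path:      []string{"User", "Profile"},
	}
	return e.Error() // "User.Profile.Age: must be numeric"
}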

View File

@@ -1,100 +0,0 @@
package govalidator
import (
"math"
)
// Abs returns the absolute value of a number
func Abs(value float64) float64 {
return math.Abs(value)
}
// Sign returns the signum of a number: 1 if value > 0, -1 if value < 0, 0 otherwise
func Sign(value float64) float64 {
if value > 0 {
return 1
} else if value < 0 {
return -1
} else {
return 0
}
}
// IsNegative returns true if value < 0
func IsNegative(value float64) bool {
return value < 0
}
// IsPositive returns true if value > 0
func IsPositive(value float64) bool {
return value > 0
}
// IsNonNegative returns true if value >= 0
func IsNonNegative(value float64) bool {
return value >= 0
}
// IsNonPositive returns true if value <= 0
func IsNonPositive(value float64) bool {
return value <= 0
}
// InRangeInt returns true if value lies between the left and right borders
func InRangeInt(value, left, right interface{}) bool {
value64, _ := ToInt(value)
left64, _ := ToInt(left)
right64, _ := ToInt(right)
if left64 > right64 {
left64, right64 = right64, left64
}
return value64 >= left64 && value64 <= right64
}
// InRangeFloat32 returns true if value lies between the left and right borders
func InRangeFloat32(value, left, right float32) bool {
if left > right {
left, right = right, left
}
return value >= left && value <= right
}
// InRangeFloat64 returns true if value lies between the left and right borders
func InRangeFloat64(value, left, right float64) bool {
if left > right {
left, right = right, left
}
return value >= left && value <= right
}
// InRange returns true if value lies between the left and right borders; it handles int, float32, float64 and string generically.
// All arguments must be of the same type.
// Returns false if the value doesn't lie in the range, or if the arguments are incompatible or not comparable.
func InRange(value interface{}, left interface{}, right interface{}) bool {
switch value.(type) {
case int:
intValue, _ := ToInt(value)
intLeft, _ := ToInt(left)
intRight, _ := ToInt(right)
return InRangeInt(intValue, intLeft, intRight)
case float32, float64:
intValue, _ := ToFloat(value)
intLeft, _ := ToFloat(left)
intRight, _ := ToFloat(right)
return InRangeFloat64(intValue, intLeft, intRight)
case string:
return value.(string) >= left.(string) && value.(string) <= right.(string)
default:
return false
}
}
// IsWhole returns true if value is a whole number
func IsWhole(value float64) bool {
return math.Remainder(value, 1) == 0
}
// IsNatural returns true if value is a natural number (positive and whole)
func IsNatural(value float64) bool {
return IsWhole(value) && IsPositive(value)
}
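// Illustrative usage sketch (not part of the original numerics.go):
func exampleNumericsSketch() {
	_ = InRangeInt(5, 1, 10)       // true
	_ = InRangeFloat64(3.5, 10, 1) // true: swapped borders are normalised internally
	_ = InRange("m", "a", "z")     // true: strings compare lexicographically
	_ = Sign(-2.5)                 // -1
	_ = IsNatural(7)               // true: positive and whole
}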

Some files were not shown because too many files have changed in this diff.