forked from adamm/autogits
Compare commits
254 Commits
group-revi ... submodulem
Commit list: 254 commits (only abbreviated commit SHAs were present; the author, date, and message columns were empty).
.gitattributes (vendored), new file, 21 lines:

```
@@ -0,0 +1,21 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*.changes merge=merge-changes
```
.gitignore (vendored), 4 lines added:

```
@@ -1,2 +1,6 @@
mock
node_modules
*.obscpio
autogits-tmp.tar.zst
*.osc
*.conf
```
README.md, 11 lines added:

```
@@ -22,4 +22,15 @@ Bugs
Report bugs to issue tracker at https://src.opensuse.org/adamm/autogits


Build Status
------------

main branch build status:

![https://src.opensuse.org/adamm/autogits/actions/workflows/ci.yml/badge.svg?branch=main]()

Devel project build status:

![https://build.opensuse.org/projects/openSUSE:Tools/packages/autogits/badge.svg?type=default]()
```
_service, new file, 15 lines:

```xml
@@ -0,0 +1,15 @@
<services>
  <!-- workaround, go_modules needs a tar and obs_scm doesn't take file://. -->
  <service name="roast" mode="manual">
    <param name="target">.</param>
    <param name="reproducible">true</param>
    <param name="outfile">autogits-tmp.tar.zst</param>
    <param name="exclude">autogits-tmp.tar.zst</param>
  </service>
  <service name="go_modules" mode="manual">
    <param name="basename">./</param>
    <param name="compression">zst</param>
    <param name="vendorname">vendor</param>
  </service>
</services>
```
autogits.changes, new file, 10 lines:

```
@@ -0,0 +1,10 @@
-------------------------------------------------------------------
Wed Sep 11 16:00:58 UTC 2024 - Adam Majer <adam.majer@suse.de>

- enable Authorization bearer token checks

-------------------------------------------------------------------
Wed Sep 11 14:10:18 UTC 2024 - Adam Majer <adam.majer@suse.de>

- rabbitmq publisher
```
autogits.spec, 132 lines changed:

```
@@ -22,17 +22,20 @@ Release: 0
Summary: GitWorkflow utilities
License: GPL-2.0-or-later
URL: https://src.opensuse.org/adamm/autogits
Source: autogits-%{version}.tar.zst
Source1: vendor.tar.zst
BuildRequires: golang-packaging
BuildRequires: systemd-rpm-macros
BuildRequires: zstd
BuildRequires: go
%{?systemd_ordering}

%description
Git Workflow tooling and utilities enabling automated handing of OBS projects
as git repositories

%package -n hujson
Summary: HuJSON to JSON parser

%description -n hujson
HuJSON to JSON parser, using stdin -> stdout pipe

%package -n gitea-events-rabbitmq-publisher
Summary: Publishes Gitea webhook data via RabbitMQ

@@ -41,19 +44,87 @@ Listens on an HTTP socket and publishes Gitea events on a RabbitMQ instance
with a topic
<scope>.src.$organization.$webhook_type.[$webhook_action_type]


%package -n doc
Summary: Common documentation files

%description -n doc
Common documentation files


%package -n group-review
Summary: Reviews of groups defined in ProjectGit

%description -n group-review
Is used to handle reviews associated with groups defined in the
ProjectGit.


%package -n obs-staging-bot
Summary: Build a PR against a ProjectGit, if review is requested

%description -n obs-staging-bot
Build a PR against a ProjectGit, if review is requested.


%package -n obs-status-service
Summary: Reports build status of OBS service as an easily to produce SVG

%description -n obs-status-service
Reports build status of OBS service as an easily to produce SVG


%package -n workflow-direct
Summary: Keep ProjectGit in sync for a devel project

%description -n workflow-direct
Keep ProjectGit in sync with packages in the organization of a devel project


%package -n workflow-pr
Summary: Keeps ProjectGit PR in-sync with a PackageGit PR

%description -n workflow-pr
Keeps ProjectGit PR in-sync with a PackageGit PR


%prep
%autosetup -p1
cd gitea-events-rabbitmq-publisher && tar x --zstd -f %{SOURCE1}
cp -r /home/abuild/rpmbuild/SOURCES/* ./

%build
go build \
    -C hujson \
    -buildmode=pie
go build \
    -C gitea-events-rabbitmq-publisher \
    -mod=vendor \
    -buildmode=pie
go build \
    -C group-review \
    -buildmode=pie
go build \
    -C obs-staging-bot \
    -buildmode=pie
go build \
    -C obs-status-service \
    -buildmode=pie
go build \
    -C workflow-direct \
    -buildmode=pie
go build \
    -C workflow-pr \
    -buildmode=pie

%install
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0755 hujson/hujson %{buildroot}%{_bindir}/hujson

%pre -n gitea-events-rabbitmq-publisher
%service_add_pre gitea-events-rabbitmq-publisher.service

@@ -67,11 +138,56 @@ install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service
%postun -n gitea-events-rabbitmq-publisher
%service_del_postun gitea-events-rabbitmq-publisher.service

%pre -n obs-staging-bot
%service_add_pre obs-staging-bot.service

%post -n obs-staging-bot
%service_add_post obs-staging-bot.service

%preun -n obs-staging-bot
%service_del_preun obs-staging-bot.service

%postun -n obs-staging-bot
%service_del_postun obs-staging-bot.service

%files -n gitea-events-rabbitmq-publisher
%license COPYING
%doc gitea-events-rabbitmq-publisher/README.md
%{_bindir}/gitea-events-rabbitmq-publisher
%{_unitdir}/gitea-events-rabbitmq-publisher.service

%changelog
%files -n doc
%license COPYING
%doc doc/README.md
%doc doc/workflows.md

%files -n group-review
%license COPYING
%doc group-review/README.md
%{_bindir}/group-review

%files -n hujson
%license COPYING
%{_bindir}/hujson

%files -n obs-staging-bot
%license COPYING
%doc obs-staging-bot/README.md
%{_bindir}/obs-staging-bot
%{_unitdir}/obs-staging-bot.service

%files -n obs-status-service
%license COPYING
%doc obs-status-service/README.md
%{_bindir}/obs-status-service

%files -n workflow-direct
%license COPYING
%doc workflow-direct/README.md
%{_bindir}/workflow-direct

%files -n workflow-pr
%license COPYING
%doc workflow-pr/README.md
%{_bindir}/workflow-pr
```
Deleted file, 15 lines (the file name was not captured; the content is Makefile rules for the generated Gitea API client):

```make
@@ -1,15 +0,0 @@
all: build

api.json:
	curl -o api.json https://src.opensuse.org/swagger.v1.json

gitea-generated/client/gitea_api_client.go:: api.json
	[ -d gitea-generated ] || mkdir gitea-generated
	podman run --rm -v $$(pwd):/api ghcr.io/go-swagger/go-swagger generate client -f /api/api.json -t /api/gitea-generated

api: gitea-generated/client/gitea_api_client.go mock_gitea_utils.go
	go generate

build: api
	go build
```
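The `api` target above runs `go generate` after the swagger client is produced, which presumably rebuilds `mock_gitea_utils.go` with go.uber.org/mock (the tests later in this diff import `mock_common "src.opensuse.org/autogits/common/mock"` and use gomock). The actual `go:generate` directive is not part of this diff; the following is a minimal sketch of what such a directive could look like, with the source file and package names being assumptions:

```go
// gitea_utils.go (hypothetical file holding the directive)
package common

// Regenerate with `go generate ./...`; mockgen ships with go.uber.org/mock.
//go:generate mockgen -source=gitea_utils.go -destination=mock_gitea_utils.go -package=mock_common
```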
Deleted file, 130 lines (package common: workflow configuration handling, ConfigFile and AutogitConfig):

```go
@@ -1,130 +0,0 @@
package common

/*
 * This file is part of Autogits.
 *
 * Copyright © 2024 SUSE LLC
 *
 * Autogits is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 2 of the License, or (at your option) any later
 * version.
 *
 * Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Foobar. If not, see <https://www.gnu.org/licenses/>.
 */

import (
    "encoding/json"
    "fmt"
    "io"
    "log"
    "os"
    "strings"
)

type ConfigFile struct {
    GitProjectName []string
}

type AutogitConfig struct {
    Workflows      []string // [pr, direct, test]
    Organization   string
    GitProjectName string // Organization/GitProjectName.git is PrjGit
    Branch         string // branch name of PkgGit that aligns with PrjGit submodules
    Reviewers      []string // only used by `pr` workflow
}

type AutogitConfigs []*AutogitConfig

func ReadConfig(reader io.Reader) (*ConfigFile, error) {
    data, err := io.ReadAll(reader)
    if err != nil {
        return nil, fmt.Errorf("Error reading config data: %w", err)
    }

    config := ConfigFile{}
    if err := json.Unmarshal(data, &config.GitProjectName); err != nil {
        return nil, fmt.Errorf("Error parsing Git Project paths: %w", err)
    }

    return &config, nil
}

func ReadConfigFile(filename string) (*ConfigFile, error) {
    file, err := os.Open(filename)
    if err != nil {
        return nil, fmt.Errorf("Cannot open config file for reading. err: %w", err)
    }
    defer file.Close()

    return ReadConfig(file)
}

func ReadWorkflowConfig(gitea Gitea, git_project string) (*AutogitConfig, error) {
    hash := strings.Split(git_project, "#")
    branch := ""
    if len(hash) > 1 {
        branch = hash[1]
    }

    a := strings.Split(hash[0], "/")
    prjGitRepo := DefaultGitPrj
    switch len(a) {
    case 1:
    case 2:
        prjGitRepo = a[1]
    default:
        return nil, fmt.Errorf("Missing org/repo in projectgit: %s", git_project)
    }

    data, _, err := gitea.GetRepositoryFileContent(a[0], prjGitRepo, branch, "workflow.config")
    if err != nil {
        return nil, fmt.Errorf("Error fetching 'workflow.config': %w", err)
    }

    var config AutogitConfig
    if err := json.Unmarshal(data, &config); err != nil {
        return nil, fmt.Errorf("Error parsing config file: %w", err)
    }

    config.GitProjectName = a[0] + "/" + prjGitRepo
    if len(branch) > 0 {
        config.GitProjectName = config.GitProjectName + "#" + branch
    }
    if len(config.Organization) < 1 {
        config.Organization = a[0]
    }
    log.Println(config)
    return &config, nil
}

func ResolveWorkflowConfigs(gitea Gitea, config *ConfigFile) (AutogitConfigs, error) {
    configs := make([]*AutogitConfig, 0, len(config.GitProjectName))
    for _, git_project := range config.GitProjectName {
        c, err := ReadWorkflowConfig(gitea, git_project)
        if err != nil {
            // can't sync, so ignore for now
            log.Println(err)
        } else {
            configs = append(configs, c)
        }
    }

    return configs, nil
}

func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *AutogitConfig {
    for _, c := range configs {
        if c.Organization == org && c.Branch == branch {
            return c
        }
    }

    return nil
}
```
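ReadWorkflowConfig above fetches a `workflow.config` file from the ProjectGit and unmarshals it straight into AutogitConfig. A minimal sketch of what such a file could contain and how it decodes, assuming plain JSON with the field names of the struct above (the concrete values are invented for illustration):

```go
package main

import (
    "encoding/json"
    "fmt"
)

// Mirrors the AutogitConfig struct from the deleted file above.
type AutogitConfig struct {
    Workflows      []string
    Organization   string
    GitProjectName string
    Branch         string
    Reviewers      []string
}

func main() {
    // Hypothetical workflow.config contents.
    raw := []byte(`{
        "Workflows": ["pr", "direct"],
        "Organization": "someorg",
        "GitProjectName": "someorg/prjgit#main",
        "Branch": "main",
        "Reviewers": ["reviewer1", "-prj-only-reviewer"]
    }`)

    var config AutogitConfig
    if err := json.Unmarshal(raw, &config); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", config)
}
```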
Deleted file, 104 lines (generated go-swagger client code, package issue: IssueSearchIssues response reader):

```go
@@ -1,104 +0,0 @@
// Code generated by go-swagger; DO NOT EDIT.

package issue

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
    "encoding/json"
    "fmt"
    "io"

    "github.com/go-openapi/runtime"
    "github.com/go-openapi/strfmt"

    "src.opensuse.org/autogits/common/gitea-generated/models"
)

// IssueSearchIssuesReader is a Reader for the IssueSearchIssues structure.
type IssueSearchIssuesReader struct {
    formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
func (o *IssueSearchIssuesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
    switch response.Code() {
    case 200:
        result := NewIssueSearchIssuesOK()
        if err := result.readResponse(response, consumer, o.formats); err != nil {
            return nil, err
        }
        return result, nil
    default:
        return nil, runtime.NewAPIError("[GET /repos/issues/search] issueSearchIssues", response, response.Code())
    }
}

// NewIssueSearchIssuesOK creates a IssueSearchIssuesOK with default headers values
func NewIssueSearchIssuesOK() *IssueSearchIssuesOK {
    return &IssueSearchIssuesOK{}
}

/*
IssueSearchIssuesOK describes a response with status code 200, with default header values.

IssueList
*/
type IssueSearchIssuesOK struct {
    Payload []*models.Issue
}

// IsSuccess returns true when this issue search issues o k response has a 2xx status code
func (o *IssueSearchIssuesOK) IsSuccess() bool {
    return true
}

// IsRedirect returns true when this issue search issues o k response has a 3xx status code
func (o *IssueSearchIssuesOK) IsRedirect() bool {
    return false
}

// IsClientError returns true when this issue search issues o k response has a 4xx status code
func (o *IssueSearchIssuesOK) IsClientError() bool {
    return false
}

// IsServerError returns true when this issue search issues o k response has a 5xx status code
func (o *IssueSearchIssuesOK) IsServerError() bool {
    return false
}

// IsCode returns true when this issue search issues o k response a status code equal to that given
func (o *IssueSearchIssuesOK) IsCode(code int) bool {
    return code == 200
}

// Code gets the status code for the issue search issues o k response
func (o *IssueSearchIssuesOK) Code() int {
    return 200
}

func (o *IssueSearchIssuesOK) Error() string {
    payload, _ := json.Marshal(o.Payload)
    return fmt.Sprintf("[GET /repos/issues/search][%d] issueSearchIssuesOK %s", 200, payload)
}

func (o *IssueSearchIssuesOK) String() string {
    payload, _ := json.Marshal(o.Payload)
    return fmt.Sprintf("[GET /repos/issues/search][%d] issueSearchIssuesOK %s", 200, payload)
}

func (o *IssueSearchIssuesOK) GetPayload() []*models.Issue {
    return o.Payload
}

func (o *IssueSearchIssuesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

    // response payload
    if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
        return err
    }

    return nil
}
```
Deleted file, 50 lines (generated go-swagger model, package models: AddCollaboratorOption):

```go
@@ -1,50 +0,0 @@
// Code generated by go-swagger; DO NOT EDIT.

package models

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
    "context"

    "github.com/go-openapi/strfmt"
    "github.com/go-openapi/swag"
)

// AddCollaboratorOption AddCollaboratorOption options when adding a user as a collaborator of a repository
//
// swagger:model AddCollaboratorOption
type AddCollaboratorOption struct {

    // permission
    Permission string `json:"permission,omitempty"`
}

// Validate validates this add collaborator option
func (m *AddCollaboratorOption) Validate(formats strfmt.Registry) error {
    return nil
}

// ContextValidate validates this add collaborator option based on context it is used
func (m *AddCollaboratorOption) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
    return nil
}

// MarshalBinary interface implementation
func (m *AddCollaboratorOption) MarshalBinary() ([]byte, error) {
    if m == nil {
        return nil, nil
    }
    return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *AddCollaboratorOption) UnmarshalBinary(b []byte) error {
    var res AddCollaboratorOption
    if err := swag.ReadJSON(b, &res); err != nil {
        return err
    }
    *m = res
    return nil
}
```
Deleted file, 317 lines (package common: RabbitMQ event listener, topic management, and event dispatch):

```go
@@ -1,317 +0,0 @@
package common

/*
 * This file is part of Autogits.
 *
 * Copyright © 2024 SUSE LLC
 *
 * Autogits is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 2 of the License, or (at your option) any later
 * version.
 *
 * Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Foobar. If not, see <https://www.gnu.org/licenses/>.
 */

import (
    "crypto/tls"
    "fmt"
    "log"
    "net/url"
    "runtime/debug"
    "slices"
    "strings"
    "time"

    rabbitmq "github.com/rabbitmq/amqp091-go"
)

const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"

type RequestProcessor interface {
    ProcessFunc(*Request) error
}

type ListenDefinitions struct {
    RabbitURL *url.URL // amqps://user:password@host/queue

    GitAuthor string
    Handlers  map[string]RequestProcessor
    Orgs      []string

    topics          []string
    topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}

type RabbitMessage rabbitmq.Delivery

func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
    for {
        topic, ok := <-l.topicSubChanges
        if !ok {
            return
        }

        log.Println(" topic change:", topic)
        switch topic[0] {
        case '+':
            if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
                log.Println(err)
            }
        case '-':
            if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
                log.Println(err)
            }
        default:
            log.Println("Ignoring topic change.")
        }
    }
}

func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
    queueName := l.RabbitURL.Path
    l.RabbitURL.Path = ""

    if len(queueName) > 0 && queueName[0] == '/' {
        queueName = queueName[1:]
    }

    connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
        ServerName: l.RabbitURL.Hostname(),
    })
    if err != nil {
        return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
    }
    defer connection.Close()

    ch, err := connection.Channel()
    if err != nil {
        return fmt.Errorf("Cannot create a channel. Err: %w", err)
    }
    defer ch.Close()

    if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
        return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
    }

    var q rabbitmq.Queue
    if len(queueName) == 0 {
        q, err = ch.QueueDeclare("", false, true, true, false, nil)
    } else {
        q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
        if err != nil {
            log.Printf("queue not found .. trying to create it: %v\n", err)
            if ch.IsClosed() {
                ch, err = connection.Channel()
                if err != nil {
                    return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
                }
            }
            q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)

            if err != nil {
                log.Printf("can't create persistent queue ... falling back to temporaty queue: %v\n", err)
                if ch.IsClosed() {
                    ch, err = connection.Channel()
                    return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
                }
                q, err = ch.QueueDeclare("", false, true, true, false, nil)
            }
        }
    }
    if err != nil {
        return fmt.Errorf("Cannot declare queue. Err: %w", err)
    }
    // log.Printf("queue: %s:%d", q.Name, q.Consumers)

    log.Println(" -- listening to topics:")
    l.topicSubChanges = make(chan string)
    defer close(l.topicSubChanges)
    go l.processTopicChanges(ch, q.Name)

    for _, topic := range l.topics {
        l.topicSubChanges <- "+" + topic
    }

    msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
    if err != nil {
        return fmt.Errorf("Cannot start consumer. Err: %w", err)
    }
    // log.Printf("queue: %s:%d", q.Name, q.Consumers)

    for {
        msg, ok := <-msgs
        if !ok {
            return fmt.Errorf("channel/connection closed?\n")
        }

        msgCh <- RabbitMessage(msg)
    }
}

func (l *ListenDefinitions) connectAndProcessRabbitMQ(log *log.Logger, ch chan<- RabbitMessage) {
    defer func() {
        if r := recover(); r != nil {
            log.Println(r)
            log.Println("'crash' RabbitMQ worker. Recovering... reconnecting...")
            time.Sleep(5 * time.Second)
            go l.connectAndProcessRabbitMQ(log, ch)
        }
    }()

    for {
        err := l.processRabbitMQ(ch)
        if err != nil {
            log.Printf("Error in RabbitMQ connection. %#v", err)
            log.Println("Reconnecting in 2 seconds...")
            time.Sleep(2 * time.Second)
        }
    }
}

func (l *ListenDefinitions) connectToRabbitMQ(log *log.Logger) chan RabbitMessage {
    ch := make(chan RabbitMessage, 100)
    go l.connectAndProcessRabbitMQ(log, ch)

    return ch
}

func ProcessEvent(f RequestProcessor, request *Request) {
    defer func() {
        if r := recover(); r != nil {
            log.Println("panic caught")
            if err, ok := r.(error); !ok {
                log.Println(err)
            }
            log.Println(string(debug.Stack()))
        }
    }()

    if err := f.ProcessFunc(request); err != nil {
        log.Println(err)
    }

}

func (l *ListenDefinitions) generateTopics() []string {
    topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
    scope := "suse"
    if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
        scope = "opensuse"
    }

    for _, org := range l.Orgs {
        for requestType, _ := range l.Handlers {
            topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
        }
    }

    slices.Sort(topics)
    return slices.Compact(topics)
}

func (l *ListenDefinitions) UpdateTopics() {
    newTopics := l.generateTopics()

    j := 0
next_new_topic:
    for i := 0; i < len(newTopics); i++ {
        topic := newTopics[i]

        for j < len(l.topics) {
            cmp := strings.Compare(topic, l.topics[j])

            if cmp == 0 {
                j++
                continue next_new_topic
            }

            if cmp < 0 {
                l.topicSubChanges <- "+" + topic
                break
            }

            l.topicSubChanges <- "-" + l.topics[j]
            j++
        }

        if j == len(l.topics) {
            l.topicSubChanges <- "+" + topic
        }
    }
}

func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
    log.Println("RabbitMQ connection:", l.RabbitURL.String())
    log.Println(len(l.Handlers), len(l.Orgs))

    l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
    l.topics = l.generateTopics()
    ch := l.connectToRabbitMQ(log.Default())

    for {
        msg, ok := <-ch
        if !ok {
            return nil
        }

        log.Println("event:", msg.RoutingKey)

        route := strings.Split(msg.RoutingKey, ".")
        if len(route) > 3 {
            reqType := route[3]
            org := route[2]

            if !slices.Contains(l.Orgs, org) {
                log.Println("Got event for unhandeled org:", org)
                continue
            }

            log.Println("org:", org, "type:", reqType)
            if handler, found := l.Handlers[reqType]; found {
                /* h, err := CreateRequestHandler()
                if err != nil {
                    log.Println("Cannot create request handler", err)
                    continue
                }
                */
                req, err := ParseRequestJSON(reqType, msg.Body)
                if err != nil {
                    log.Println("Error parsing request JSON:", err)
                    continue
                } else {
                    log.Println("processing req", req.Type)
                    // h.Request = req
                    ProcessEvent(handler, req)

                }
            }
        }
    }
}
```
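generateTopics above derives one routing-key pattern per (organization, request type) pair in the form `<scope>.src.<org>.<type>.#`, then sorts and deduplicates the list. A standalone sketch of the same idea, with the scope and inputs as example values only:

```go
package main

import (
    "fmt"
    "slices"
)

// generateTopics builds one topic pattern per (org, request type) pair,
// sorted and deduplicated, mirroring the deleted method above.
func generateTopics(scope string, orgs, requestTypes []string) []string {
    topics := make([]string, 0, len(orgs)*len(requestTypes))
    for _, org := range orgs {
        for _, t := range requestTypes {
            topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, t))
        }
    }
    slices.Sort(topics)
    return slices.Compact(topics) // drop adjacent duplicates after sorting
}

func main() {
    fmt.Println(generateTopics("opensuse", []string{"myorg"}, []string{"pull_request", "push"}))
    // Output: [opensuse.src.myorg.pull_request.# opensuse.src.myorg.push.#]
}
```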
Deleted file, 48 lines (package common: test for ListenDefinitions.UpdateTopics):

```go
@@ -1,48 +0,0 @@
package common

import (
    "net/url"
    "testing"
)

func TestListenDefinitionsTopicUpdate(t *testing.T) {

    tests := []struct {
        name         string
        handlers     []string
        orgs1, orgs2 []string

        topicDelta []string
    }{
        {
            name: "no handlers, no orgs",
        },
        {
            name:       "adding one org",
            handlers:   []string{"foo"},
            orgs2:      []string{"newOrg"},
            topicDelta: []string{"+suse"},
        },
    }

    u, _ := url.Parse("amqps://rabbit.example.com")
    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
            l := ListenDefinitions{
                Orgs:            test.orgs1,
                Handlers:        make(map[string]RequestProcessor),
                topicSubChanges: make(chan string, len(test.topicDelta)*10),
                RabbitURL:       u,
            }

            for _, r := range test.handlers {
                l.Handlers[r] = nil
            }

            l.UpdateTopics()
            if len(l.topicSubChanges) != len(test.topicDelta) {
                t.Fatal("topicSubChanges != topicDelta")
            }
        })
    }
}
```
Deleted file, 333 lines (package common: PR set fetching, consistency checks, reviewer assignment, approval, and merge):

```go
@@ -1,333 +0,0 @@
package common

import (
    "bufio"
    "errors"
    "fmt"
    "os"
    "path"
    "slices"
    "strings"

    "src.opensuse.org/autogits/common/gitea-generated/models"
)

type PRInfo struct {
    PR      *models.PullRequest
    Reviews *PRReviews
}

type PRSet struct {
    PRs    []PRInfo
    Config *AutogitConfig
}

func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []PRInfo, config *AutogitConfig) ([]PRInfo, error) {
    for _, p := range currentSet {
        if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
            return nil, nil
        }
    }
    retSet := []PRInfo{PRInfo{PR: pr}}

    // only need to extact there on PrjGit PR
    if pr.Base.Repo.Name == config.GitProjectName && pr.Base.Repo.Owner.UserName == config.Organization {
        _, refPRs := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
        for _, prdata := range refPRs {
            pr, err := gitea.GetPullRequest(prdata.Org, prdata.Repo, prdata.Num)
            if err != nil {
                return nil, err
            }
            data, err := readPRData(gitea, pr, slices.Concat(currentSet, retSet), config)
            if err != nil {
                return nil, err
            }
            retSet = slices.Concat(retSet, data)
        }
    }

    return retSet, nil
}

func FetchPRSet(gitea GiteaPRFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
    var pr *models.PullRequest
    var err error

    if org != config.Organization || repo != config.GitProjectName {
        if pr, err = gitea.GetAssociatedPrjGitPR(config.Organization, config.GitProjectName, org, repo, num); err != nil {
            return nil, err
        }

        if pr == nil {
            if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
                return nil, err
            }
        }
    } else {
        if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
            return nil, err
        }
    }

    prs, err := readPRData(gitea, pr, nil, config)
    if err != nil {
        return nil, err
    }

    return &PRSet{PRs: prs, Config: config}, nil
}

func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
    return pr.Base.Repo.Name == rs.Config.GitProjectName && pr.Base.Repo.Owner.UserName == rs.Config.Organization
}

func (rs *PRSet) GetPrjGitPR() (*models.PullRequest, error) {
    var ret *models.PullRequest

    for _, prinfo := range rs.PRs {
        if rs.IsPrjGitPR(prinfo.PR) {
            if ret == nil {
                ret = prinfo.PR
            } else {
                return nil, errors.New("Multiple PrjGit PRs in one review set")
            }
        }
    }

    if ret != nil {
        return ret, nil
    }

    return nil, errors.New("No PrjGit PR found")
}

func (rs *PRSet) IsConsistent() bool {
    prjpr, err := rs.GetPrjGitPR()
    if err != nil {
        return false
    }
    _, prjpr_set := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(prjpr.Body)))
    if len(prjpr_set) != len(rs.PRs)-1 { // 1 to many mapping
        return false
    }

next_rs:
    for _, prinfo := range rs.PRs {
        if prjpr == prinfo.PR {
            continue
        }

        for _, pr := range prjpr_set {
            if prinfo.PR.Base.Repo.Owner.UserName == pr.Org && prinfo.PR.Base.Repo.Name == pr.Repo && prinfo.PR.Index == pr.Num {
                continue next_rs
            }
        }
        return false
    }
    return true
}

func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintainers MaintainershipData) error {
    configReviewers := ParseReviewers(rs.Config.Reviewers)

    for _, pr := range rs.PRs {
        reviewers := []string{}
        if rs.IsPrjGitPR(pr.PR) {
            reviewers = configReviewers.Prj
            if len(rs.PRs) == 1 {
                reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers())
            }
        } else {
            pkg := pr.PR.Base.Repo.Name
            reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(), maintainers.ListPackageMaintainers(pkg))
        }

        // submitters do not need to review their own work
        if idx := slices.Index(reviewers, pr.PR.User.UserName); idx != -1 {
            reviewers = slices.Delete(reviewers, idx, idx+1)
        }

        // remove reviewers that were already requested and are not stale
        reviews, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
        if err != nil {
            return err
        }

        for idx := 0; idx < len(reviewers); {
            user := reviewers[idx]
            if reviews.HasPendingReviewBy(user) || reviews.IsReviewedBy(user) {
                reviewers = slices.Delete(reviewers, idx, idx+1)
            } else {
                idx++
            }
        }

        // get maintainers associated with the PR too
        if len(reviewers) > 0 {
            if _, err := gitea.RequestReviews(pr.PR, reviewers...); err != nil {
                return fmt.Errorf("Cannot create reviews on %s/%s#%d for [%s]: %w", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", "), err)
            }
        }
    }
    return nil
}

func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData) bool {
    configReviewers := ParseReviewers(rs.Config.Reviewers)

    is_reviewed := false
    for _, pr := range rs.PRs {
        var reviewers []string
        var pkg string
        if rs.IsPrjGitPR(pr.PR) {
            reviewers = configReviewers.Prj
            pkg = ""
        } else {
            reviewers = configReviewers.Pkg
            pkg = pr.PR.Base.Repo.Name
        }

        r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
        if err != nil {
            return false
        }
        is_reviewed = r.IsApproved()
        if !is_reviewed {
            return false
        }

        if is_reviewed = maintainers.IsApproved(pkg, r.reviews); !is_reviewed {
            return false
        }
    }
    return is_reviewed
}

func (rs *PRSet) Merge(author, email string) error {
    prjgit, err := rs.GetPrjGitPR()
    if err != nil {
        return err
    }

    gh := GitHandlerGeneratorImpl{}
    git, err := gh.CreateGitHandler(author, email, prjgit.Base.Name)
    if err != nil {
        return err
    }
    git.GitExecOrPanic("", "clone", "--depth", "1", prjgit.Base.Repo.SSHURL, DefaultGitPrj)
    git.GitExecOrPanic(DefaultGitPrj, "fetch", "origin", prjgit.Base.Sha, prjgit.Head.Sha)

    // if other changes merged, check if we have conflicts
    rev := strings.TrimSpace(git.GitExecWithOutputOrPanic(DefaultGitPrj, "merge-base", "HEAD", prjgit.Base.Sha, prjgit.Head.Sha))
    if rev != prjgit.Base.Sha {
        return fmt.Errorf("Base.Sha (%s) not yet merged into project-git. Aborting merge.", prjgit.Base.Sha)
    }
    /*
        rev := git.GitExecWithOutputOrPanic(common.DefaultGitPrj, "rev-list", "-1", "HEAD")
        if rev != prjgit.Base.Sha {
            panic("FIXME")
        }
    */
    msg := "merging"

    err = git.GitExec(DefaultGitPrj, "merge", "--no-ff", "-m", msg, prjgit.Head.Sha)
    if err != nil {
        status, statusErr := git.GitStatus(DefaultGitPrj)
        if statusErr != nil {
            return fmt.Errorf("Failed to merge: %w . Status also failed: %w", err, statusErr)
        }

        // we can only resolve conflicts with .gitmodules
        for _, s := range status {
            if s.Status == GitStatus_Unmerged {
                if s.Path != ".gitmodules" {
                    return err
                }

                submodules, err := git.GitSubmoduleList(DefaultGitPrj, "MERGE_HEAD")
                if err != nil {
                    return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
                }
                s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
                if err != nil {
                    return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
                }
                s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
                if err != nil {
                    return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
                }
                s3, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[2])
                if err != nil {
                    return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
                }

                subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
                if err != nil {
                    return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
                }
                subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
                if err != nil {
                    return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
                }
                subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
                if err != nil {
                    return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
                }

                // merge from subs3 (target), subs1 (orig), subs2 (2-nd base that is missing from target base)
                // this will update submodules
                mergedSubs := slices.Concat(subs1, subs2, subs3)

                var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
            nextSub:
                for subName := range submodules {

                    for i := range mergedSubs {
                        if path.Base(mergedSubs[i].Path) == subName {
                            filteredSubs = append(filteredSubs, mergedSubs[i])
                            continue nextSub
                        }
                    }
                    return fmt.Errorf("Cannot find submodule for path: %s", subName)
                }

                out, err := os.Create(path.Join(git.GetPath(), DefaultGitPrj, ".gitmodules"))
                if err != nil {
                    return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
                }
                if err = WriteSubmodules(filteredSubs, out); err != nil {
                    return fmt.Errorf("Can't write .gitmodules: %w", err)
                }
                if out.Close(); err != nil {
                    return fmt.Errorf("Can't close .gitmodules: %w", err)
                }

                os.CopyFS("/tmp/test", os.DirFS(git.GetPath()))

                git.GitExecOrPanic(DefaultGitPrj, "add", ".gitmodules")
                git.GitExecOrPanic(DefaultGitPrj, "-c", "core.editor=true", "merge", "--continue")
            }
        }
    }

    // FF all non-prj git
    for _, prinfo := range rs.PRs {
        if rs.IsPrjGitPR(prinfo.PR) {
            continue
        }
        git.GitExecOrPanic("", "clone", prinfo.PR.Base.Repo.SSHURL, prinfo.PR.Base.Name)
        git.GitExecOrPanic(prinfo.PR.Base.Name, "fetch", "origin", prinfo.PR.Head.Sha)
        git.GitExecOrPanic(prinfo.PR.Base.Name, "merge", "--ff", prinfo.PR.Head.Sha)
    }

    // push changes
    git.GitExecOrPanic(DefaultGitPrj, "push", "origin")
    for _, prinfo := range rs.PRs {
        if rs.IsPrjGitPR(prinfo.PR) {
            continue
        }
        git.GitExecOrPanic(prinfo.PR.Base.Name, "push", "origin")
    }

    return nil
}
```
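readPRData and IsConsistent above rely on ExtractDescriptionAndPRs, which is not part of this diff. Based on the PR bodies used in the tests below ("PR: org/repo#number" lines), a minimal sketch of that kind of reference parsing could look like the following; the regular expression and the return shape are assumptions, not the project's actual implementation:

```go
package main

import (
    "fmt"
    "regexp"
    "strconv"
)

// PRRef is a hypothetical stand-in for the reference type returned by
// ExtractDescriptionAndPRs in the deleted code.
type PRRef struct {
    Org  string
    Repo string
    Num  int64
}

var prLine = regexp.MustCompile(`(?m)^PR: ([^/\s]+)/([^#\s]+)#([0-9]+)$`)

// extractPRRefs pulls "PR: org/repo#num" references out of a PrjGit PR body.
func extractPRRefs(body string) []PRRef {
    var refs []PRRef
    for _, m := range prLine.FindAllStringSubmatch(body, -1) {
        num, _ := strconv.ParseInt(m[3], 10, 64)
        refs = append(refs, PRRef{Org: m[1], Repo: m[2], Num: num})
    }
    return refs
}

func main() {
    fmt.Println(extractPRRefs("some description\nPR: test/repo#42\nPR: test/repo2#41"))
}
```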
@@ -1,554 +0,0 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestPR(t *testing.T) {
|
||||
baseConfig := common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
Organization: "foo",
|
||||
GitProjectName: "barPrj",
|
||||
}
|
||||
|
||||
type prdata struct {
|
||||
pr *models.PullRequest
|
||||
pr_err error
|
||||
reviews []*models.PullReview
|
||||
review_error error
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
data []prdata
|
||||
api_error string
|
||||
|
||||
resLen int
|
||||
reviewed bool
|
||||
consistentSet bool
|
||||
prjGitPRIndex int
|
||||
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRFetcher) (*common.PRSet, error)
|
||||
}{
|
||||
{
|
||||
name: "Error fetching PullRequest",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("Missing PR")},
|
||||
},
|
||||
prjGitPRIndex: -1,
|
||||
},
|
||||
{
|
||||
name: "Error fetching PullRequest in PrjGit",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("missing PR")},
|
||||
{pr: &models.PullRequest{Body: "", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Error fetching prjgit",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 1,
|
||||
prjGitPRIndex: -1,
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
resLen: 2,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
|
||||
{
|
||||
name: "Review set is consistent: 1pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
},
|
||||
resLen: 2,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent: 2pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "some desc", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "some other desc\nPR: foo/fer#33", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 3,
|
||||
prjGitPRIndex: 1,
|
||||
consistentSet: true,
|
||||
},
|
||||
{
|
||||
name: "Review set of prjgit PR is consistent",
|
||||
data: []prdata{
|
||||
{
|
||||
pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}},
|
||||
reviews: []*models.PullReview{
|
||||
{Body: "LGTM", User: &models.User{UserName: "m2"}, State: common.ReviewStateApproved},
|
||||
{Body: "LGTM", User: &models.User{UserName: "super2"}, State: common.ReviewStateApproved},
|
||||
{Body: "LGTM", User: &models.User{UserName: common.Bot_BuildReview}, State: common.ReviewStateApproved},
|
||||
},
|
||||
},
|
||||
},
|
||||
resLen: 1,
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet(mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Review set is consistent: 2pkg",
|
||||
data: []prdata{
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#222", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo2#41", Index: 20, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
|
||||
{pr: &models.PullRequest{Body: "PR: foo/barPrj#20", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
|
||||
},
|
||||
resLen: 3,
|
||||
prjGitPRIndex: 2,
|
||||
consistentSet: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaPRChecker(ctl)
|
||||
// reviewer_mock := mock_common.NewMockGiteaReviewRequester(ctl)
|
||||
|
||||
if test.reviewSetFetcher == nil { // if we are fetching the prjgit directly, the these mocks are not called
|
||||
if test.prjGitPRIndex >= 0 {
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
|
||||
Return(test.data[test.prjGitPRIndex].pr, test.data[test.prjGitPRIndex].pr_err)
|
||||
} else if test.prjGitPRIndex < 0 {
|
||||
// no prjgit PR
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
|
||||
Return(nil, nil)
|
||||
}
|
||||
}
|
||||
|
||||
var test_err error
|
||||
for _, data := range test.data {
|
||||
pr_mock.EXPECT().GetPullRequest(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.pr, data.pr_err).AnyTimes()
|
||||
if data.pr_err != nil {
|
||||
test_err = data.pr_err
|
||||
}
|
||||
review_mock.EXPECT().GetPullRequestReviews(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.reviews, data.review_error).AnyTimes()
|
||||
}
|
||||
|
||||
var res *common.PRSet
|
||||
var err error
|
||||
|
||||
if test.reviewSetFetcher != nil {
|
||||
res, err = test.reviewSetFetcher(pr_mock)
|
||||
} else {
|
||||
res, err = common.FetchPRSet(pr_mock, "test", "repo", 42, &baseConfig)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
if test_err != nil {
|
||||
t.Fatal("Expected", test_err, "but got", err)
|
||||
}
|
||||
} else {
|
||||
if res != nil {
|
||||
t.Fatal("error but got ReviewSet?")
|
||||
}
|
||||
|
||||
if test.api_error != "" {
|
||||
if err.Error() != test.api_error {
|
||||
t.Fatal("expected", test.api_error, "but got", err)
|
||||
}
|
||||
} else if test_err != err {
|
||||
t.Fatal("expected", test_err, "but got", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if test.resLen != len(res.PRs) {
|
||||
t.Error("expected result len", test.resLen, "but got", len(res.PRs))
|
||||
}
|
||||
|
||||
PrjGitPR, err := res.GetPrjGitPR()
|
||||
if test.prjGitPRIndex < 0 {
|
||||
if err == nil {
|
||||
t.Error("expected error, but nothing")
|
||||
}
|
||||
}
|
||||
pr_found := false
|
||||
if test.prjGitPRIndex >= 0 {
|
||||
for i := range test.data {
|
||||
if PrjGitPR == test.data[i].pr && i == test.prjGitPRIndex {
|
||||
t.Log("found at index", i)
|
||||
pr_found = true
|
||||
}
|
||||
}
|
||||
if !pr_found {
|
||||
t.Error("Cannot find expected PrjGit location in PR set", PrjGitPR)
|
||||
}
|
||||
} else {
|
||||
if PrjGitPR != nil {
|
||||
t.Log("Expected prjgit not found, but found?", PrjGitPR)
|
||||
}
|
||||
}
|
||||
|
||||
if isConsistent := res.IsConsistent(); isConsistent != test.consistentSet {
|
||||
t.Error("IsConsistent() returned unexpected:", isConsistent)
|
||||
}
|
||||
/*
|
||||
if err := res.AssignReviewers(reviewer_mock); err != nil {
|
||||
t.Error("expected no errors assigning reviewers:", err)
|
||||
}
|
||||
*/
|
||||
|
||||
maintainers := mock_common.NewMockMaintainershipData(ctl)
|
||||
maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any()).Return(true).AnyTimes()
|
||||
|
||||
if isApproved := res.IsApproved(review_mock, maintainers); isApproved != test.reviewed {
|
||||
t.Error("expected reviewed to be NOT", isApproved)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRAssignReviewers(t *testing.T) {

  tests := []struct {
    name string
    config common.AutogitConfig
    reviewers []struct {
      org, repo string
      num int64
      reviewer string
    }

    pkgReviews []*models.PullReview
    prjReviews []*models.PullReview

    expectedReviewerCall [2][]string
  }{
    {
      name: "No reviewers",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{},
      },
      expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
    },
    {
      name: "One project reviewer only",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{"-user1"},
      },
      expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
    },
    {
      name: "One project reviewer and one pkg reviewer only",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{"-user1", "user2"},
      },
      expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"user2", "prjmaintainer", "pkgmaintainer"}},
    },
    {
      name: "No need to get reviews of submitter",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{"-user1", "submitter"},
      },
      expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
    },
    {
      name: "Reviews are done",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{"-user1", "user2"},
      },
      pkgReviews: []*models.PullReview{
        {
          State: common.ReviewStateApproved,
          User: &models.User{UserName: "user2"},
        },
        {
          State: common.ReviewStateApproved,
          User: &models.User{UserName: "pkgmaintainer"},
        },
        {
          State: common.ReviewStatePending,
          User: &models.User{UserName: "prjmaintainer"},
        },
      },
      prjReviews: []*models.PullReview{
        {
          State: common.ReviewStateRequestChanges,
          User: &models.User{UserName: "user1"},
        },
        {
          State: common.ReviewStateRequestReview,
          User: &models.User{UserName: "autogits_obs_staging_bot"},
        },
      },
      expectedReviewerCall: [2][]string{},
    },
    {
      name: "Stale review is not done, re-request it",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{"-user1", "user2"},
      },
      pkgReviews: []*models.PullReview{
        {
          State: common.ReviewStateApproved,
          User: &models.User{UserName: "user2"},
        },
        {
          State: common.ReviewStatePending,
          User: &models.User{UserName: "prjmaintainer"},
        },
      },
      prjReviews: []*models.PullReview{
        {
          State: common.ReviewStateRequestChanges,
          User: &models.User{UserName: "user1"},
          Stale: true,
        },
        {
          State: common.ReviewStateRequestReview,
          Stale: true,
          User: &models.User{UserName: "autogits_obs_staging_bot"},
        },
      },
      expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer"}},
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      ctl := gomock.NewController(t)
      pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
      review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
      maintainership_mock := mock_common.NewMockMaintainershipData(ctl)

      pr_mock.EXPECT().GetPullRequest("other", "pkgrepo", int64(1)).Return(&models.PullRequest{
        Body: "Some description is here",
        User: &models.User{UserName: "submitter"},
        RequestedReviewers: []*models.User{},
        Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "pkgrepo", Owner: &models.User{UserName: "other"}}},
        Head: &models.PRBranchInfo{},
        Index: 1,
      }, nil)
      review_mock.EXPECT().GetPullRequestReviews("other", "pkgrepo", int64(1)).Return(test.pkgReviews, nil)
      pr_mock.EXPECT().GetAssociatedPrjGitPR("org", "repo", "other", "pkgrepo", int64(1)).Return(&models.PullRequest{
        Body: fmt.Sprintf(common.PrPattern, "other", "pkgrepo", 1),
        User: &models.User{UserName: "bot1"},
        RequestedReviewers: []*models.User{{UserName: "main_reviewer"}},
        Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
        Head: &models.PRBranchInfo{},
        Index: 42,
      }, nil)
      review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)

      maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
      maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo").Return([]string{"pkgmaintainer"}).AnyTimes()

      prs, _ := common.FetchPRSet(pr_mock, "other", "pkgrepo", int64(1), &test.config)
      if len(prs.PRs) != 2 {
        t.Fatal("PRs not fetched")
      }
      for _, pr := range prs.PRs {
        r := test.expectedReviewerCall[0]
        if !prs.IsPrjGitPR(pr.PR) {
          r = test.expectedReviewerCall[1]
        }
        if len(r) > 0 {
          review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
        }
      }
      prs.AssignReviewers(review_mock, maintainership_mock)
    })
  }

  prjgit_tests := []struct {
    name string
    config common.AutogitConfig
    reviewers []struct {
      org, repo string
      num int64
      reviewer string
    }

    prjReviews []*models.PullReview

    expectedReviewerCall [2][]string
  }{
    {
      name: "PrjMaintainers in prjgit review when not part of pkg set",
      config: common.AutogitConfig{
        GitProjectName: "repo",
        Organization: "org",
        Branch: "main",
        Reviewers: []string{},
      },
      expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot", "prjmaintainer"}},
    },
  }
  for _, test := range prjgit_tests {
    t.Run(test.name, func(t *testing.T) {
      ctl := gomock.NewController(t)
      pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
      review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
      maintainership_mock := mock_common.NewMockMaintainershipData(ctl)

      pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
        Body: "Some description is here",
        User: &models.User{UserName: "submitter"},
        RequestedReviewers: []*models.User{},
        Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
        Head: &models.PRBranchInfo{},
        Index: 1,
      }, nil)
      review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)

      maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()

      prs, _ := common.FetchPRSet(pr_mock, "org", "repo", int64(1), &test.config)
      if len(prs.PRs) != 1 {
        t.Fatal("PRs not fetched")
      }
      for _, pr := range prs.PRs {
        r := test.expectedReviewerCall[0]
        if !prs.IsPrjGitPR(pr.PR) {
          t.Fatal("only prjgit pr here")
        }
        if len(r) > 0 {
          review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
        }
      }
      prs.AssignReviewers(review_mock, maintainership_mock)
    })
  }
}

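The Reviewers lists exercised above follow a prefix convention: judging from these cases (and from the TestReviewers table further down in this diff), a "-" prefix marks a reviewer that is only requested on the project-git PR, while unprefixed entries are requested on the package PRs. A minimal illustrative sketch, with made-up user names, of splitting such a list through the ParseReviewers helper used by TestReviewers:

  // Hypothetical values for illustration only.
  reviewers := common.ParseReviewers([]string{"-user1", "user2"})
  fmt.Println(reviewers.Prj) // reviewers to request on the project-git PR
  fmt.Println(reviewers.Pkg) // reviewers to request on the package PRs
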
func TestPRMerge(t *testing.T) {
  cwd, _ := os.Getwd()
  cmd := exec.Command("/usr/bin/bash", path.Join(cwd, "test_repo_setup.sh"))
  cmd.Dir = t.TempDir()
  if out, err := cmd.CombinedOutput(); err != nil {
    t.Fatal(string(out))
  }

  common.ExtraGitParams = []string{
    "GIT_CONFIG_COUNT=1",
    "GIT_CONFIG_KEY_0=protocol.file.allow",
    "GIT_CONFIG_VALUE_0=always",

    "GIT_AUTHOR_NAME=testname",
    "GIT_AUTHOR_EMAIL=test@suse.com",
    "GIT_AUTHOR_DATE='2005-04-07T22:13:13'",
    "GIT_COMMITTER_NAME=testname",
    "GIT_COMMITTER_EMAIL=test@suse.com",
    "GIT_COMMITTER_DATE='2005-04-07T22:13:13'",
  }

  config := &common.AutogitConfig{
    Organization: "org",
    GitProjectName: "prj",
  }

  tests := []struct {
    name string
    pr *models.PullRequest
    mergeError string
  }{
    {
      name: "Merge base not merged in main",

      pr: &models.PullRequest{
        Base: &models.PRBranchInfo{
          Sha: "e8b0de43d757c96a9d2c7101f4bff404e322f53a1fa4041fb85d646110c38ad4", // "base_add_b1"
          Repo: &models.Repository{
            Name: "prj",
            Owner: &models.User{
              UserName: "org",
            },
            SSHURL: path.Join(cmd.Dir, "prjgit"),
          },
        },
        Head: &models.PRBranchInfo{
          Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
        },
      },
      mergeError: "Aborting merge",
    },
    {
      name: "Merge conflict in modules",

      pr: &models.PullRequest{
        Base: &models.PRBranchInfo{
          Sha: "4fbd1026b2d7462ebe9229a49100c11f1ad6555520a21ba515122d8bc41328a8",
          Repo: &models.Repository{
            Name: "prj",
            Owner: &models.User{
              UserName: "org",
            },
            SSHURL: path.Join(cmd.Dir, "prjgit"),
          },
        },
        Head: &models.PRBranchInfo{
          Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
        },
      },
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      ctl := gomock.NewController(t)
      mock := mock_common.NewMockGiteaPRFetcher(ctl)

      mock.EXPECT().GetPullRequest("org", "prj", int64(1)).Return(test.pr, nil)

      set, err := common.FetchPRSet(mock, "org", "prj", 1, config)
      if err != nil {
        t.Fatal(err)
      }

      if err = set.Merge("test", "test@example.com"); err != nil && (test.mergeError == "" || (len(test.mergeError) > 0 && !strings.Contains(err.Error(), test.mergeError))) {
        t.Fatal(err)
      }
    })
  }
}

@@ -1,38 +0,0 @@
package common_test

import (
  "slices"
  "testing"

  "src.opensuse.org/autogits/common"
)

func TestReviewers(t *testing.T) {
  tests := []struct {
    name string
    input []string

    prj []string
    pkg []string
  }{
    {
      name: "project and package reviewers",
      input: []string{"1", "2", "3", "*5", "+6", "-7"},

      prj: []string{"5", "7", common.Bot_BuildReview},
      pkg: []string{"1", "2", "3", "5", "6"},
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      reviewers := common.ParseReviewers(test.input)
      if !slices.Equal(reviewers.Prj, test.prj) {
        t.Error("unexpected project reviewers (Prj):", reviewers.Prj)
      }
      if !slices.Equal(reviewers.Pkg, test.pkg) {
        t.Error("unexpected package reviewers (Pkg):", reviewers.Pkg)
      }
    })
  }
}

@@ -1,87 +0,0 @@
package common

import (
  "slices"

  "src.opensuse.org/autogits/common/gitea-generated/models"
)

type PRReviews struct {
  reviews []*models.PullReview
  reviewers []string
}

func FetchGiteaReviews(rf GiteaReviewFetcher, reviewers []string, org, repo string, no int64) (*PRReviews, error) {
  reviews, err := rf.GetPullRequestReviews(org, repo, no)
  if err != nil {
    return nil, err
  }

  return &PRReviews{
    reviews: reviews,
    reviewers: reviewers,
  }, nil
}

func (r *PRReviews) IsApproved() bool {
  goodReview := false

  for _, reviewer := range r.reviewers {
    goodReview = false
    for _, review := range r.reviews {
      if review.User.UserName == reviewer && review.State == ReviewStateApproved && !review.Stale {
        goodReview = true
        break
      }
    }

    if !goodReview {
      break
    }
  }

  return goodReview
}

func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
  if !slices.Contains(r.reviewers, reviewer) {
    return false
  }

  isPending := false
  for _, r := range r.reviews {
    if r.User.UserName == reviewer && !r.Stale {
      switch r.State {
      case ReviewStateApproved:
        fallthrough
      case ReviewStateRequestChanges:
        return false
      case ReviewStateRequestReview:
        fallthrough
      case ReviewStatePending:
        isPending = true
      }
    }
  }

  return isPending
}

func (r *PRReviews) IsReviewedBy(reviewer string) bool {
  if !slices.Contains(r.reviewers, reviewer) {
    return false
  }

  for _, r := range r.reviews {
    if r.User.UserName == reviewer && !r.Stale {
      switch r.State {
      case ReviewStateApproved:
        return true
      case ReviewStateRequestChanges:
        return true
      }
    }
  }

  return false
}

@@ -1,50 +0,0 @@
package common

/*
 * This file is part of Autogits.
 *
 * Copyright © 2024 SUSE LLC
 *
 * Autogits is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 2 of the License, or (at your option) any later
 * version.
 *
 * Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Autogits. If not, see <https://www.gnu.org/licenses/>.
 */

import (
  "fmt"
  "slices"
  "strings"
)

func SplitStringNoEmpty(str, sep string) []string {
  ret := slices.DeleteFunc(strings.Split(str, sep), func(s string) bool {
    return len(strings.TrimSpace(s)) == 0
  })
  for i := range ret {
    ret[i] = strings.TrimSpace(ret[i])
  }
  return ret
}

func TranslateHttpsToSshUrl(url string) (string, error) {
  const url1 = "https://src.opensuse.org/"
  const url2 = "https://src.suse.de/"

  if len(url) > len(url1) && url[0:len(url1)] == url1 {
    return "gitea@src.opensuse.org:" + url[len(url1):], nil
  }
  if len(url) > len(url2) && url[0:len(url2)] == url2 {
    return "gitea@src.suse.de:" + url[len(url2):], nil
  }

  return "", fmt.Errorf("Unknown input url %s", url)
}

common/Makefile (new file, 17 lines)
@@ -0,0 +1,17 @@
all: build

api.json::
    curl -o api.json https://src.opensuse.org/swagger.v1.json

gitea-generated/client/gitea_api_client.go: api.json
    [ -d gitea-generated ] || mkdir gitea-generated
    podman run --rm -v $$(pwd)/..:/api ghcr.io/go-swagger/go-swagger generate client -f /api/common/api.json -t /api/common/gitea-generated

swagger: gitea-generated/client/gitea_api_client.go

api:
    go generate

build: api
    go build

(File diff suppressed because it is too large.)
@@ -11,14 +11,14 @@ import (
  "strings"
)

const PrPattern = "PR: %s/%s#%d"
const PrPattern = "PR: %s/%s!%d"

type BasicPR struct {
  Org, Repo string
  Num int64
}

var validOrgAndRepoRx *regexp.Regexp = regexp.MustCompile("^[A-Za-z0-9_-]+$")
var validOrgAndRepoRx *regexp.Regexp = regexp.MustCompile("^[A-Za-z0-9_\\.-]+$")

func parsePrLine(line string) (BasicPR, error) {
  var ret BasicPR
@@ -36,10 +36,14 @@ func parsePrLine(line string) (BasicPR, error) {
    return ret, errors.New("missing / separator")
  }

  repo := strings.SplitN(org[1], "#", 2)
  repo := strings.SplitN(org[1], "!", 2)
  ret.Repo = repo[0]
  if len(repo) != 2 {
    return ret, errors.New("Missing # separator")
    repo = strings.SplitN(org[1], "#", 2)
    ret.Repo = repo[0]
  }
  if len(repo) != 2 {
    return ret, errors.New("Missing ! or # separator")
  }

  // Gitea requires that each org and repo be [A-Za-z0-9_-]+
@@ -34,7 +34,7 @@ func TestAssociatedPRScanner(t *testing.T) {
    },
    {
      "Multiple PRs",
      "Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo#5\n",
      "Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo!5\n",
      []common.BasicPR{
        {Org: "test", Repo: "foo", Num: 4},
        {Org: "test", Repo: "goo", Num: 5},
@@ -107,7 +107,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
      []common.BasicPR{
        {Org: "a", Repo: "b", Num: 100},
      },
      "something\n\nPR: a/b#100",
      "something\n\nPR: a/b!100",
    },
    {
      "Append multiple PR to end of description",
@@ -119,7 +119,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
        {Org: "b", Repo: "b", Num: 100},
        {Org: "c", Repo: "b", Num: 100},
      },
      "something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
      "something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
    },
    {
      "Append multiple sorted PR to end of description and remove dups",
@@ -133,7 +133,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
        {Org: "a1", Repo: "c", Num: 101},
        {Org: "a1", Repo: "b", Num: 100},
      },
      "something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
      "something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
    },
  }

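The hunks above switch the PR cross-reference pattern from "org/repo#num" to "org/repo!num", while the parser keeps accepting the old "#" form. A minimal sketch of producing a reference line with the new pattern (the org, repo, and number are made up):

  line := fmt.Sprintf(common.PrPattern, "test", "foo", 4)
  fmt.Println(line) // prints: PR: test/foo!4
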
common/config.go (new file, 267 lines)
@@ -0,0 +1,267 @@
package common

/*
 * This file is part of Autogits.
 *
 * Copyright © 2024 SUSE LLC
 *
 * Autogits is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 2 of the License, or (at your option) any later
 * version.
 *
 * Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Autogits. If not, see <https://www.gnu.org/licenses/>.
 */

import (
  "encoding/json"
  "errors"
  "fmt"
  "io"
  "log"
  "os"
  "strings"

  "github.com/tailscale/hujson"
)

//go:generate mockgen -source=config.go -destination=mock/config.go -typed

const (
  ProjectConfigFile = "workflow.config"
  StagingConfigFile = "staging.config"
)

type ConfigFile struct {
  GitProjectNames []string
}

type ReviewGroup struct {
  Name string
  Silent bool // will not request reviews from group members
  Reviewers []string
}

type QAConfig struct {
  Name string
  Origin string
}

type AutogitConfig struct {
  Workflows []string // [pr, direct, test]
  Organization string
  GitProjectName string // Organization/GitProjectName.git is PrjGit
  Branch string // branch name of PkgGit that aligns with PrjGit submodules
  Reviewers []string // only used by `pr` workflow
  ReviewGroups []*ReviewGroup
  Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as a helper for factory-maintainers
  Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories

  NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume something else creates the ProjectGit PR
  ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
  ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
}

type AutogitConfigs []*AutogitConfig

func ReadConfig(reader io.Reader) (*ConfigFile, error) {
  data, err := io.ReadAll(reader)
  if err != nil {
    return nil, fmt.Errorf("Error reading config data: %w", err)
  }

  config := ConfigFile{}
  data, err = hujson.Standardize(data)
  if err != nil {
    return nil, fmt.Errorf("Failed to parse json: %w", err)
  }
  if err := json.Unmarshal(data, &config.GitProjectNames); err != nil {
    return nil, fmt.Errorf("Error parsing Git Project paths: %w", err)
  }

  return &config, nil
}

func ReadConfigFile(filename string) (*ConfigFile, error) {
  file, err := os.Open(filename)
  if err != nil {
    return nil, fmt.Errorf("Cannot open config file for reading. err: %w", err)
  }
  defer file.Close()

  return ReadConfig(file)
}

type GiteaFileContentAndRepoFetcher interface {
  GiteaFileContentReader
  GiteaRepoFetcher
}

func UnmarshalWorkflowConfig(data []byte) (*AutogitConfig, error) {
  var config AutogitConfig
  data, err := hujson.Standardize(data)
  if err != nil {
    return nil, fmt.Errorf("Failed to parse json: %w", err)
  }
  if err := json.Unmarshal(data, &config); err != nil {
    return nil, fmt.Errorf("Error parsing workflow config file: %s: %w", string(data), err)
  }

  return &config, nil
}

func ReadWorkflowConfig(gitea GiteaFileContentAndRepoFetcher, git_project string) (*AutogitConfig, error) {
  hash := strings.Split(git_project, "#")
  branch := ""
  if len(hash) > 1 {
    branch = hash[1]
  }

  a := strings.Split(hash[0], "/")
  prjGitRepo := DefaultGitPrj
  switch len(a) {
  case 1:
  case 2:
    prjGitRepo = a[1]
  default:
    return nil, fmt.Errorf("Missing org/repo in projectgit: %s", git_project)
  }

  data, _, err := gitea.GetRepositoryFileContent(a[0], prjGitRepo, branch, ProjectConfigFile)
  if err != nil {
    return nil, fmt.Errorf("Error fetching 'workflow.config' for %s/%s#%s: %w", a[0], prjGitRepo, branch, err)
  }

  config, err := UnmarshalWorkflowConfig(data)
  if err != nil {
    return nil, err
  }

  if len(config.Organization) < 1 {
    config.Organization = a[0]
  }
  config.GitProjectName = a[0] + "/" + prjGitRepo
  if len(branch) == 0 {
    if r, err := gitea.GetRepository(a[0], prjGitRepo); err == nil {
      branch = r.DefaultBranch
    } else {
      return nil, fmt.Errorf("Failed to read workflow config in %s: %w", git_project, err)
    }
  }
  config.GitProjectName = config.GitProjectName + "#" + branch
  return config, nil
}

func ResolveWorkflowConfigs(gitea GiteaFileContentAndRepoFetcher, config *ConfigFile) (AutogitConfigs, error) {
  configs := make([]*AutogitConfig, 0, len(config.GitProjectNames))
  for _, git_project := range config.GitProjectNames {
    c, err := ReadWorkflowConfig(gitea, git_project)
    if err != nil {
      // can't sync, so ignore for now
      log.Println(err)
    } else {
      configs = append(configs, c)
    }
  }

  return configs, nil
}

func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *AutogitConfig {
  prjgit := org + "/" + repo + "#" + branch
  for _, c := range configs {
    if c.GitProjectName == prjgit {
      return c
    }
    if c.Organization == org && c.Branch == branch {
      return c
    }
  }

  return nil
}

func (config *AutogitConfig) GetReviewGroupMembers(reviewer string) ([]string, error) {
  for _, g := range config.ReviewGroups {
    if g.Name == reviewer {
      return g.Reviewers, nil
    }
  }

  return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
}

func (config *AutogitConfig) GetReviewGroup(reviewer string) (*ReviewGroup, error) {
  for _, g := range config.ReviewGroups {
    if g.Name == reviewer {
      return g, nil
    }
  }
  return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
}

func (config *AutogitConfig) GetPrjGit() (string, string, string) {
  org := config.Organization
  repo := DefaultGitPrj
  branch := ""

  a := strings.Split(config.GitProjectName, "/")
  if len(a[0]) > 0 {
    repo = strings.TrimSpace(a[0])
  }
  if len(a) == 2 {
    if a[0] = strings.TrimSpace(a[0]); len(a[0]) > 0 {
      org = a[0]
    }
    repo = strings.TrimSpace(a[1])
  }
  b := strings.Split(repo, "#")
  if len(b) == 2 {
    if b[0] = strings.TrimSpace(b[0]); len(b[0]) > 0 {
      repo = b[0]
    } else {
      repo = DefaultGitPrj
    }
    if b[1] = strings.TrimSpace(b[1]); len(b[1]) > 0 {
      branch = strings.TrimSpace(b[1])
    }
  }

  if len(branch) == 0 {
    panic("branch for project is undefined. Should not happen: " + org + "/" + repo)
  }
  return org, repo, branch
}

func (config *AutogitConfig) GetRemoteBranch() string {
  return "origin_" + config.Branch
}

type StagingConfig struct {
  ObsProject string
  RebuildAll bool
  CleanupDelay int // cleanup delay, in hours, for unmerged closed PRs (def: 48)

  // if set, then only use pull request numbers as unique identifiers
  StagingProject string
  QA []QAConfig
}

func ParseStagingConfig(data []byte) (*StagingConfig, error) {
  var staging StagingConfig
  data, err := hujson.Standardize(data)
  if err != nil {
    return nil, err
  }
  staging.CleanupDelay = 48
  if err := json.Unmarshal(data, &staging); err != nil {
    return nil, err
  }

  return &staging, nil
}
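Because workflow.config is run through hujson.Standardize before json.Unmarshal, comments and trailing commas should be tolerated in the config. A minimal sketch of parsing such a config directly from bytes; the field values here are invented for illustration and are not taken from any real project:

  cfg, err := common.UnmarshalWorkflowConfig([]byte(`{
    // stripped by hujson.Standardize before json.Unmarshal
    "Workflows": ["pr"],
    "Organization": "someorg",
    "Branch": "main",
    "Reviewers": ["-prjreviewer", "pkgreviewer"],
  }`))
  if err != nil {
    log.Fatal(err)
  }
  log.Println(cfg.Organization, cfg.Reviewers)
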
common/config_test.go (new file, 192 lines)
@@ -0,0 +1,192 @@
package common_test

import (
  "slices"
  "testing"

  "go.uber.org/mock/gomock"
  "src.opensuse.org/autogits/common"
  "src.opensuse.org/autogits/common/gitea-generated/models"
  mock_common "src.opensuse.org/autogits/common/mock"
)

func TestProjectConfigMatcher(t *testing.T) {
  configs := common.AutogitConfigs{
    {
      Organization: "test",
      GitProjectName: "test/prjgit#main",
    },
    {
      Organization: "test",
      Branch: "main",
      GitProjectName: "test/prjgit#main",
    },
  }

  tests := []struct {
    name string
    org string
    repo string
    branch string
    config int
  }{
    {
      name: "invalid match",
      org: "foo",
      repo: "bar",
      config: -1,
    },
    {
      name: "default branch",
      org: "test",
      repo: "foo",
      branch: "",
      config: 0,
    },
    {
      name: "main branch",
      org: "test",
      repo: "foo",
      branch: "main",
      config: 1,
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      c := configs.GetPrjGitConfig(test.org, test.repo, test.branch)
      if test.config < 0 {
        if c != nil {
          t.Fatal("Expected nil. Got:", *c)
        }
      } else if config := configs[test.config]; c != config {
        t.Fatal("Expected", *config, "got", *c)
      }
    })
  }
}

func TestConfigWorkflowParser(t *testing.T) {
  tests := []struct {
    name string
    config_json string
    repo models.Repository
  }{
    {
      name: "Regular workflow file",
      config_json: `{
        "Workflows": ["direct", "pr"],
        "Organization": "testing",
        "ReviewGroups": [
          {
            "Name": "gnuman1",
            "Reviewers": ["adamm"]
          }
        ]
      }`,
      repo: models.Repository{
        DefaultBranch: "master",
      },
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      ctl := gomock.NewController(t)
      gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
      gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.config_json), "abc", nil)
      gitea.EXPECT().GetRepository("foo", "bar").Return(&test.repo, nil)

      config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
      if err != nil {
        t.Fatal(err)
      }

      if config.ManualMergeOnly != false {
        t.Fatal("This should be false")
      }
    })
  }
}

// FIXME: should test ReadWorkflowConfig as it will always set prjgit completely
func TestProjectGitParser(t *testing.T) {
  tests := []struct {
    name string
    prjgit string
    org string
    branch string
    res [3]string
  }{
    {
      name: "repo only",
      prjgit: "repo.git#master",
      org: "org",
      branch: "br",
      res: [3]string{"org", "repo.git", "master"},
    },
    {
      name: "default",
      org: "org",
      prjgit: "org/_ObsPrj#master",
      res: [3]string{"org", common.DefaultGitPrj, "master"},
    },
    {
      name: "repo with branch",
      org: "org2",
      prjgit: "org2/repo.git#somebranch",
      res: [3]string{"org2", "repo.git", "somebranch"},
    },
    {
      name: "repo org and branch",
      org: "org3",
      prjgit: "oorg/foo.bar#point",
      res: [3]string{"oorg", "foo.bar", "point"},
    },
    {
      name: "whitespace shouldn't matter",
      prjgit: " oorg / \nfoo.bar\t # point ",
      res: [3]string{"oorg", "foo.bar", "point"},
    },
    {
      name: "repo org and empty branch",
      org: "org3",
      prjgit: "oorg/foo.bar#master",
      res: [3]string{"oorg", "foo.bar", "master"},
    },
    {
      name: "only branch defined",
      org: "org3",
      prjgit: "org3/_ObsPrj#mybranch",
      res: [3]string{"org3", "_ObsPrj", "mybranch"},
    },
    {
      name: "only org and branch defined",
      org: "org3",
      prjgit: "org1/_ObsPrj#mybranch",
      res: [3]string{"org1", "_ObsPrj", "mybranch"},
    },
    {
      name: "empty org and repo",
      org: "org3",
      prjgit: "org3/repo#master",
      res: [3]string{"org3", "repo", "master"},
    },
  }

  for _, test := range tests {
    t.Run(test.name, func(t *testing.T) {
      c := &common.AutogitConfig{
        Organization: test.org,
        Branch: test.branch,
        GitProjectName: test.prjgit,
      }

      i, j, k := c.GetPrjGit()
      res := []string{i, j, k}
      if !slices.Equal(res, test.res[:]) {
        t.Error("Expected", test.res, "but received", res)
      }
    })
  }
}

@@ -33,3 +33,6 @@ const (

  TopicApp = "src"
)

// when set, pushing to remote does not happen, and other remote side-effects should also not happen
var IsDryRun bool
common/git_parser.go (new file, 296 lines)
@@ -0,0 +1,296 @@
package common

import (
  "errors"
  "fmt"
  "io"
)

const (
  GitStatus_Untracked = 0
  GitStatus_Modified = 1
  GitStatus_Ignored = 2
  GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
  GitStatus_Renamed = 4 // orig name in States[0]
)

type GitStatusData struct {
  Path string
  Status int
  States [3]string

  /*
    <sub> A 4 character field describing the submodule state.
      "N..." when the entry is not a submodule.
      "S<c><m><u>" when the entry is a submodule.
      <c> is "C" if the commit changed; otherwise ".".
      <m> is "M" if it has tracked changes; otherwise ".".
      <u> is "U" if there are untracked changes; otherwise ".".
  */
  SubmoduleChanges string
}

func parseGit_HexString(data io.ByteReader) (string, error) {
  str := make([]byte, 0, 32)
  for {
    c, err := data.ReadByte()
    if err != nil {
      return "", err
    }
    switch {
    case c == 0 || c == ' ':
      return string(str), nil
    case c >= 'a' && c <= 'f':
    case c >= 'A' && c <= 'F':
    case c >= '0' && c <= '9':
    default:
      return "", errors.New("Invalid character in hex string: " + string(c))
    }
    str = append(str, c)
  }
}

func parseGit_String(data io.ByteReader) (string, error) {
  str := make([]byte, 0, 100)
  for {
    c, err := data.ReadByte()
    if err != nil {
      return "", errors.New("Unexpected EOF. Expected NUL string term")
    }
    if c == 0 || c == ' ' {
      return string(str), nil
    }
    str = append(str, c)
  }
}

func parseGit_StringWithSpace(data io.ByteReader) (string, error) {
  str := make([]byte, 0, 100)
  for {
    c, err := data.ReadByte()
    if err != nil {
      return "", errors.New("Unexpected EOF. Expected NUL string term")
    }
    if c == 0 {
      return string(str), nil
    }
    str = append(str, c)
  }
}

func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
  for skipSpaceLen > 0 {
    c, err := data.ReadByte()
    if err != nil {
      return err
    }
    if c == ' ' {
      skipSpaceLen--
    }
  }

  return nil
}

func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
  ret := GitStatusData{}
  statusType, err := data.ReadByte()
  if err != nil {
    return nil, nil
  }
  switch statusType {
  case '1':
    var err error
    if err = skipGitStatusEntry(data, 8); err != nil {
      return nil, err
    }
    ret.Status = GitStatus_Modified
    ret.Path, err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
  case '2':
    var err error
    if err = skipGitStatusEntry(data, 9); err != nil {
      return nil, err
    }
    ret.Status = GitStatus_Renamed
    ret.Path, err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
    ret.States[0], err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
  case '?':
    var err error
    if err = skipGitStatusEntry(data, 1); err != nil {
      return nil, err
    }
    ret.Status = GitStatus_Untracked
    ret.Path, err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
  case '!':
    var err error
    if err = skipGitStatusEntry(data, 1); err != nil {
      return nil, err
    }
    ret.Status = GitStatus_Ignored
    ret.Path, err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
  case 'u':
    var err error
    if err = skipGitStatusEntry(data, 2); err != nil {
      return nil, err
    }
    if ret.SubmoduleChanges, err = parseGit_String(data); err != nil {
      return nil, err
    }
    if err = skipGitStatusEntry(data, 4); err != nil {
      return nil, err
    }

    if ret.States[0], err = parseGit_HexString(data); err != nil {
      return nil, err
    }
    if ret.States[1], err = parseGit_HexString(data); err != nil {
      return nil, err
    }
    if ret.States[2], err = parseGit_HexString(data); err != nil {
      return nil, err
    }
    ret.Status = GitStatus_Unmerged
    ret.Path, err = parseGit_StringWithSpace(data)
    if err != nil {
      return nil, err
    }
  default:
    return nil, errors.New("Invalid status type: " + string(statusType))
  }
  return &ret, nil
}

func parseGitStatusData(data io.ByteReader) (Data, error) {
  ret := make([]GitStatusData, 0, 10)
  for {
    data, err := parseSingleStatusEntry(data)
    if err != nil {
      return nil, err
    } else if data == nil {
      break
    }

    ret = append(ret, *data)
  }
  return ret, nil
}

type Data interface{}

type CommitStatus int

const (
  Add CommitStatus = iota
  Rm
  Copy
  Modify
  Rename
  TypeChange
  Unmerged
  Unknown
)

type GitDiffRawData struct {
  SrcMode, DstMode string
  SrcCommit, DstCommit string
  Status CommitStatus
  Src, Dst string
}

func parseGit_DiffIndexStatus(data io.ByteReader, d *GitDiffRawData) error {
  b, err := data.ReadByte()
  if err != nil {
    return err
  }

  switch b {
  case 'A':
    d.Status = Add
  case 'C':
    d.Status = Copy
  case 'D':
    d.Status = Rm
  case 'M':
    d.Status = Modify
  case 'R':
    d.Status = Rename
  case 'T':
    d.Status = TypeChange
  case 'U':
    d.Status = Unmerged
  case 'X':
    return fmt.Errorf("Unexpected unknown change type. This is a git bug")
  }
  _, err = parseGit_StringWithSpace(data)
  if err != nil {
    return err
  }

  return nil
}

func parseSingleGitDiffIndexRawData(data io.ByteReader) (*GitDiffRawData, error) {
  var ret GitDiffRawData

  b, err := data.ReadByte()
  if err != nil {
    return nil, err
  }
  if b != ':' {
    return nil, fmt.Errorf("Expected ':' but got '%s'", string(b))
  }

  if ret.SrcMode, err = parseGit_String(data); err != nil {
    return nil, err
  }
  if ret.DstMode, err = parseGit_String(data); err != nil {
    return nil, err
  }
  if ret.Src, err = parseGit_String(data); err != nil {
    return nil, err
  }
  if ret.Dst, err = parseGit_String(data); err != nil {
    return nil, err
  }
  if err = parseGit_DiffIndexStatus(data, &ret); err != nil {
    return nil, err
  }
  ret.Dst = ret.Src
  switch ret.Status {
  case Copy, Rename:
    if ret.Src, err = parseGit_StringWithSpace(data); err != nil {
      return nil, err
    }
  }

  return &ret, nil
}

func parseGitDiffIndexRawData(data io.ByteReader) (Data, error) {
  ret := make([]GitDiffRawData, 0, 10)
  for {
    data, err := parseSingleGitDiffIndexRawData(data)
    if err != nil {
      return nil, err
    } else if data == nil {
      break
    }

    ret = append(ret, *data)
  }
  return ret, nil
}
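The status parser above consumes NUL-terminated entries in the shape of `git status --porcelain=v2 -z` output (the exact git invocation is an assumption; it is not shown in this file). A minimal package-internal sketch feeding it a single modified-file entry, with invented hashes and file name:

  // needs "bufio", "fmt" and "strings" imports; runs inside package common
  input := "1 .M N... 100644 100644 100644 aaaa bbbb some_file.txt\x00"
  entries, err := parseGitStatusData(bufio.NewReader(strings.NewReader(input)))
  if err != nil {
    panic(err)
  }
  for _, e := range entries.([]GitStatusData) {
    fmt.Println(e.Path, e.Status) // some_file.txt 1 (GitStatus_Modified)
  }
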
@@ -19,15 +19,14 @@ package common
 */

import (
  "bufio"
  "bytes"
  "errors"
  "fmt"
  "io"
  "log"
  "os"
  "os/exec"
  "path"
  "path/filepath"
  "slices"
  "strings"
  "sync"
)
@@ -39,16 +38,28 @@ type GitSubmoduleLister interface {
  GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool)
}

type GitSubmoduleFileConflictResolver interface {
  GitResolveSubmoduleFileConflict(cwd string) error
}

type GitStatusLister interface {
  GitStatus(cwd string) ([]GitStatusData, error)
}

type GitDiffLister interface {
  GitDiff(cwd, base, head string) (string, error)
}

type Git interface {
  // error if git, but wrong remote
  GitClone(repo, branch, remoteUrl string) (string, error) // clone, or check if path is already checked out remote and force pulls, error otherwise. Returns remote name, error

  GitParseCommits(cwd string, commitIDs []string) (parsedCommits []GitCommit, err error)
  GitCatFile(cwd, commitId, filename string) (data []byte, err error)
  GetPath() string

  GitBranchHead(gitDir, branchName string) (string, error)
  GitRemoteHead(gitDir, remoteName, branchName string) (string, error)
  io.Closer

  GitSubmoduleLister
@@ -58,14 +69,19 @@ type Git interface {
  GitExecOrPanic(cwd string, params ...string)
  GitExec(cwd string, params ...string) error
  GitExecWithOutput(cwd string, params ...string) (string, error)
  GitExecQuietOrPanic(cwd string, params ...string)

  GitDiffLister
  GitSubmoduleFileConflictResolver
}

type GitHandlerImpl struct {
  DebugLogger bool

  GitPath string
  GitCommiter string
  GitEmail string

  lock *sync.Mutex
  quiet bool
}

func (s *GitHandlerImpl) GetPath() string {
@@ -73,34 +89,88 @@ func (s *GitHandlerImpl) GetPath() string {
}

type GitHandlerGenerator interface {
  CreateGitHandler(git_author, email, prjName string) (Git, error)
  ReadExistingPath(git_author, email, gitPath string) (Git, error)
  CreateGitHandler(org string) (Git, error)
  ReadExistingPath(org string) (Git, error)

  ReleaseLock(path string)
}

type GitHandlerGeneratorImpl struct{}
type gitHandlerGeneratorImpl struct {
  path string
  git_author string
  email string

func (s *GitHandlerGeneratorImpl) CreateGitHandler(git_author, email, prj_name string) (Git, error) {
  gitPath, err := os.MkdirTemp("", prj_name)
  if err != nil {
    return nil, fmt.Errorf("Cannot create temp dir: %w", err)
  }

  if err = os.Chmod(gitPath, 0700); err != nil {
    return nil, fmt.Errorf("Cannot fix permissions of temp dir: %w", err)
  }

  return s.ReadExistingPath(git_author, email, gitPath)
  lock_lock sync.Mutex
  lock map[string]*sync.Mutex // per org
}

func (*GitHandlerGeneratorImpl) ReadExistingPath(git_author, email, gitPath string) (Git, error) {
func AllocateGitWorkTree(basePath, gitAuthor, email string) (*gitHandlerGeneratorImpl, error) {
  if fi, err := os.Stat(basePath); err != nil || !fi.IsDir() {
    return nil, fmt.Errorf("Git basepath not a valid directory: %s %w", basePath, err)
  }

  if fi, err := os.Stat(basePath); err != nil {
    if os.IsNotExist(err) {
      if err = os.MkdirAll(basePath, 0o700); err != nil {
        return nil, fmt.Errorf("Cannot create git directory structure: %s: %w", basePath, err)
      }
    } else {
      return nil, fmt.Errorf("Error checking git directory structure: %s: %w", basePath, err)
    }
  } else if !fi.IsDir() {
    return nil, fmt.Errorf("Invalid git directory structure: %s != directory", basePath)
  }

  return &gitHandlerGeneratorImpl{
    path: basePath,
    git_author: gitAuthor,
    email: email,

    lock: make(map[string]*sync.Mutex),
  }, nil
}

func (s *gitHandlerGeneratorImpl) CreateGitHandler(org string) (Git, error) {
  path := path.Join(s.path, org)
  if fs, err := os.Stat(path); (err != nil && !os.IsNotExist(err)) || (err == nil && !fs.IsDir()) {
    return nil, err
  } else if err != nil && os.IsNotExist(err) {
    if err := os.MkdirAll(path, 0o777); err != nil && !os.IsExist(err) {
      return nil, err
    }
  }

  return s.ReadExistingPath(org)
}

func (s *gitHandlerGeneratorImpl) ReadExistingPath(org string) (Git, error) {
  LogDebug("Locking git org:", org)
  s.lock_lock.Lock()
  defer s.lock_lock.Unlock()

  if _, ok := s.lock[org]; !ok {
    s.lock[org] = &sync.Mutex{}
  }
  s.lock[org].Lock()

  git := &GitHandlerImpl{
    GitCommiter: git_author,
    GitPath: gitPath,
    GitCommiter: s.git_author,
    GitEmail: s.email,
    GitPath: path.Join(s.path, org),
    lock: s.lock[org],
  }

  return git, nil
}

func (s *gitHandlerGeneratorImpl) ReleaseLock(org string) {
  m, ok := s.lock[org]
  if ok {
    LogDebug("Unlocking git org:", org)
    m.Unlock()
  }
}

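With the hunks above, git work trees are no longer temporary directories per handler: they are allocated per organization under a shared base path and serialized with a per-org lock. A minimal usage sketch; the base path, author, and org name are made up, and whether ReleaseLock or the handler's Close is the right cleanup for a given bot is not shown here:

  wt, err := common.AllocateGitWorkTree("/var/lib/autogits/git", "autogits-bot", "bot@example.com")
  if err != nil {
    log.Fatal(err)
  }
  git, err := wt.CreateGitHandler("some-org") // checkouts live under <basePath>/some-org
  if err != nil {
    log.Fatal(err)
  }
  defer wt.ReleaseLock("some-org") // releases the per-org lock taken when the handler was created
  _ = git
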
//func (h *GitHandler) ProcessBranchList() []string {
|
||||
// if h.HasError() {
|
||||
// return make([]string, 0)
|
||||
@@ -139,20 +209,101 @@ func (refs *GitReferences) addReference(id, branch string) {
|
||||
refs.refs = append(refs.refs, GitReference{Branch: branch, Id: id})
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
|
||||
id, err := e.GitExecWithOutput(gitDir, "rev-list", "-1", branchName)
|
||||
func (e *GitHandlerImpl) GitClone(repo, branch, remoteUrl string) (string, error) {
|
||||
LogDebug("Cloning", remoteUrl, " repo:", repo, " branch:", branch)
|
||||
remoteUrlComp, err := ParseGitRemoteUrl(remoteUrl)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Can't find default remote branch: %s", branchName)
|
||||
return "", fmt.Errorf("Cannot parse remote URL: %w", err)
|
||||
}
|
||||
remoteBranch := "HEAD"
|
||||
if len(branch) == 0 && remoteUrlComp != nil && remoteUrlComp.Commit != "HEAD" {
|
||||
branch = remoteUrlComp.Commit
|
||||
remoteBranch = branch
|
||||
} else if len(branch) > 0 {
|
||||
remoteBranch = branch
|
||||
}
|
||||
remoteName := remoteUrlComp.RemoteName()
|
||||
if remoteUrlComp != nil {
|
||||
LogDebug("Clone", *remoteUrlComp, " -> ", remoteName)
|
||||
} else {
|
||||
LogDebug("Clone", "[default] -> ", remoteName)
|
||||
}
|
||||
|
||||
remoteRef := remoteName + "/" + remoteBranch
|
||||
if fi, err := os.Stat(path.Join(e.GitPath, repo)); os.IsNotExist(err) {
|
||||
if err = e.GitExec("", "clone", "--origin", remoteName, remoteUrl, repo); err != nil {
|
||||
return remoteName, err
|
||||
}
|
||||
} else if err != nil || !fi.IsDir() {
|
||||
return remoteName, fmt.Errorf("Clone location not a directory or Stat error: %w", err)
|
||||
} else {
|
||||
if u, err := e.GitExecWithOutput(repo, "remote", "get-url", remoteName); err != nil {
|
||||
e.GitExecOrPanic(repo, "remote", "add", remoteName, remoteUrl)
|
||||
} else if clonedRemote := strings.TrimSpace(u); clonedRemote != remoteUrl {
|
||||
e.GitExecOrPanic(repo, "remote", "set-url", remoteName, remoteUrl)
|
||||
}
|
||||
|
||||
// check if we have submodule to deinit
|
||||
if list, _ := e.GitSubmoduleList(repo, "HEAD"); len(list) > 0 {
|
||||
e.GitExecQuietOrPanic(repo, "submodule", "deinit", "--all", "--force")
|
||||
}
|
||||
|
||||
e.GitExecOrPanic(repo, "fetch", "--prune", remoteName, remoteBranch)
|
||||
}
|
||||
/*
|
||||
refsBytes, err := os.ReadFile(path.Join(e.GitPath, repo, ".git/refs/remotes", remoteName, "HEAD"))
|
||||
if err != nil {
|
||||
LogError("Cannot read HEAD of remote", remoteName)
|
||||
return remoteName, fmt.Errorf("Cannot read HEAD of remote %s", remoteName)
|
||||
}
|
||||
|
||||
refs := string(refsBytes)
|
||||
if refs[0:5] != "ref: " {
|
||||
LogError("Unexpected format of remote HEAD ref:", refs)
|
||||
return remoteName, fmt.Errorf("Unexpected format of remote HEAD ref: %s", refs)
|
||||
}
|
||||
|
||||
if len(branch) == 0 || branch == "HEAD" {
|
||||
remoteRef = strings.TrimSpace(refs[5:])
|
||||
branch = remoteRef[strings.LastIndex(remoteRef, "/")+1:]
|
||||
LogDebug("remoteRef", remoteRef)
|
||||
LogDebug("branch", branch)
|
||||
}
|
||||
*/
|
||||
args := []string{"fetch", "--prune", remoteName, branch}
|
||||
if strings.TrimSpace(e.GitExecWithOutputOrPanic(repo, "rev-parse", "--is-shallow-repository")) == "true" {
|
||||
args = slices.Insert(args, 1, "--unshallow")
|
||||
}
|
||||
e.GitExecOrPanic(repo, args...)
|
||||
return remoteName, e.GitExec(repo, "checkout", "--track", "-B", branch, remoteRef)
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
|
||||
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--heads", "--hash", branchName)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Can't find default branch: %s", branchName)
|
||||
}
|
||||
|
||||
id = strings.TrimSpace(SplitLines(id)[0])
|
||||
if len(id) < 10 {
|
||||
return "", fmt.Errorf("Can't find branch: %s", branchName)
|
||||
}
|
||||
|
||||
return id, nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitRemoteHead(gitDir, remote, branchName string) (string, error) {
|
||||
id, err := e.GitExecWithOutput(gitDir, "show-ref", "--hash", "--verify", "refs/remotes/"+remote+"/"+branchName)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Can't find default branch: %s", branchName)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(id), nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) Close() error {
|
||||
if err := os.RemoveAll(e.GitPath); err != nil {
|
||||
return err
|
||||
}
|
||||
e.GitPath = ""
|
||||
LogDebug("Unlocking git lock")
|
||||
e.lock.Unlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -175,14 +326,16 @@ func (h writeFunc) Close() error {
|
||||
func (e *GitHandlerImpl) GitExecWithOutputOrPanic(cwd string, params ...string) string {
|
||||
out, err := e.GitExecWithOutput(cwd, params...)
|
||||
if err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
LogError("git command failed:", params, "@", cwd, "err:", err)
|
||||
panic(err)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExecOrPanic(cwd string, params ...string) {
|
||||
if err := e.GitExec(cwd, params...); err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
LogError("git command failed:", params, "@", cwd, "err:", err)
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,6 +355,7 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
|
||||
"GIT_COMMITTER_NAME=" + e.GitCommiter,
|
||||
"EMAIL=not@exist@src.opensuse.org",
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_LFS_SKIP_PUSH=1",
|
||||
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
|
||||
}
|
||||
if len(ExtraGitParams) > 0 {
|
||||
@@ -210,23 +364,26 @@ func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stdin = nil
|
||||
|
||||
if e.DebugLogger {
|
||||
log.Printf("git execute: %#v\n", cmd.Args)
|
||||
}
|
||||
LogDebug("git execute @", cwd, ":", cmd.Args)
|
||||
out, err := cmd.CombinedOutput()
|
||||
if e.DebugLogger {
|
||||
log.Println(string(out))
|
||||
if !e.quiet {
|
||||
LogDebug(string(out))
|
||||
}
|
||||
if err != nil {
|
||||
if e.DebugLogger {
|
||||
log.Printf(" *** error: %v\n", err)
|
||||
}
|
||||
LogError("git", cmd.Args, " error:", err)
|
||||
return "", fmt.Errorf("error executing: git %#v \n%s\n err: %w", cmd.Args, out, err)
|
||||
}
|
||||
|
||||
return string(out), nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExecQuietOrPanic(cwd string, params ...string) {
|
||||
e.quiet = true
|
||||
e.GitExecOrPanic(cwd, params...)
|
||||
e.quiet = false
|
||||
return
|
||||
}
|
||||
|
||||
type ChanIO struct {
|
||||
ch chan byte
|
||||
}
|
||||
@@ -356,21 +513,29 @@ func parseGitMsg(data <-chan byte) (GitMsg, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
func parseGitCommitHdr(data <-chan byte) ([2]string, error) {
|
||||
func parseGitCommitHdr(oldHdr [2]string, data <-chan byte) ([2]string, int, error) {
|
||||
hdr := make([]byte, 0, 60)
|
||||
val := make([]byte, 0, 1000)
|
||||
|
||||
c := <-data
|
||||
size := 1
|
||||
if c != '\n' { // end of header marker
|
||||
for ; c != ' '; c = <-data {
|
||||
hdr = append(hdr, c)
|
||||
size++
|
||||
}
|
||||
if size == 1 { // continuation header here
|
||||
hdr = []byte(oldHdr[0])
|
||||
val = append([]byte(oldHdr[1]), '\n')
|
||||
}
|
||||
for c := <-data; c != '\n'; c = <-data {
|
||||
val = append(val, c)
|
||||
size++
|
||||
}
|
||||
size++
|
||||
}
|
||||
|
||||
return [2]string{string(hdr), string(val)}, nil
|
||||
return [2]string{string(hdr), string(val)}, size, nil
|
||||
}
|
||||
|
||||
func parseGitCommitMsg(data <-chan byte, l int) (string, error) {
|
||||
@@ -380,7 +545,6 @@ func parseGitCommitMsg(data <-chan byte, l int) (string, error) {
|
||||
msg = append(msg, c)
|
||||
l--
|
||||
}
|
||||
// l--
|
||||
|
||||
if l != 0 {
|
||||
return "", fmt.Errorf("Unexpected data in the git commit msg: l=%d", l)
|
||||
@@ -400,12 +564,14 @@ func parseGitCommit(data <-chan byte) (GitCommit, error) {
|
||||
var c GitCommit
|
||||
l := hdr.size
|
||||
for {
|
||||
hdr, err := parseGitCommitHdr(data)
|
||||
var hdr [2]string
|
||||
hdr, size, err := parseGitCommitHdr(hdr, data)
|
||||
if err != nil {
|
||||
return GitCommit{}, nil
|
||||
}
|
||||
l -= size
|
||||
|
||||
if len(hdr[0])+len(hdr[1]) == 0 { // hdr end marker
|
||||
if size == 1 {
|
||||
break
|
||||
}
|
||||
|
||||
@@ -413,10 +579,7 @@ func parseGitCommit(data <-chan byte) (GitCommit, error) {
|
||||
case "tree":
|
||||
c.Tree = hdr[1]
|
||||
}
|
||||
|
||||
l -= len(hdr[0]) + len(hdr[1]) + 2
|
||||
}
|
||||
l--
|
||||
|
||||
c.Msg, err = parseGitCommitMsg(data, l)
|
||||
return c, err
|
||||
@@ -453,7 +616,6 @@ func parseTreeEntry(data <-chan byte, hashLen int) (GitTreeEntry, error) {
|
||||
}
|
||||
|
||||
func parseGitTree(data <-chan byte) (GitTree, error) {
|
||||
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return GitTree{}, err
|
||||
@@ -506,7 +668,7 @@ func (e *GitHandlerImpl) GitParseCommits(cwd string, commitIDs []string) (parsed
|
||||
var done sync.Mutex
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
|
||||
parsedCommits = make([]GitCommit, 0, len(commitIDs))
|
||||
|
||||
go func() {
|
||||
@@ -536,15 +698,16 @@ func (e *GitHandlerImpl) GitParseCommits(cwd string, commitIDs []string) (parsed
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Println(string(data))
|
||||
}
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
LogDebug("command run:", cmd.Args)
|
||||
if e := cmd.Run(); e != nil {
|
||||
LogError(e)
|
||||
close(data_in.ch)
|
||||
close(data_out.ch)
|
||||
return nil, e
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return
|
||||
@@ -555,7 +718,7 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
|
||||
var done sync.Mutex
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
|
||||
|
||||
go func() {
|
||||
defer done.Unlock()
|
||||
@@ -563,24 +726,27 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
|
||||
|
||||
data_out.Write([]byte(commitId))
|
||||
data_out.ch <- '\x00'
|
||||
c, err := parseGitCommit(data_in.ch)
|
||||
|
||||
var c GitCommit
|
||||
c, err = parseGitCommit(data_in.ch)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing git commit: %v\n", err)
|
||||
LogError("Error parsing git commit:", err)
|
||||
return
|
||||
}
|
||||
data_out.Write([]byte(c.Tree))
|
||||
data_out.ch <- '\x00'
|
||||
tree, err := parseGitTree(data_in.ch)
|
||||
|
||||
var tree GitTree
|
||||
tree, err = parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
if e.DebugLogger {
|
||||
log.Printf("Error parsing git tree: %v\n", err)
|
||||
}
|
||||
LogError("Error parsing git tree:", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, te := range tree.items {
|
||||
if te.isBlob() && te.name == filename {
|
||||
LogInfo("blob", te.hash)
|
||||
data_out.Write([]byte(te.hash))
|
||||
data_out.ch <- '\x00'
|
||||
data, err = parseGitBlob(data_in.ch)
|
||||
@@ -588,7 +754,7 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
|
||||
}
|
||||
}
|
||||
|
||||
err = fmt.Errorf("file not found: '%s'", filename)
|
||||
LogError("file not found:", filename)
|
||||
}()
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
|
||||
@@ -601,28 +767,29 @@ func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Println(string(data))
|
||||
}
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
LogDebug("command run:", cmd.Args)
|
||||
if e := cmd.Run(); e != nil {
|
||||
LogError(e)
|
||||
close(data_in.ch)
|
||||
close(data_out.ch)
|
||||
return nil, e
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return
|
||||
}
|
||||
|
||||
// return (filename) -> (hash) map for all submodules
|
||||
// TODO: recursive? map different orgs, not just assume '.' for path
|
||||
func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error) {
|
||||
var done sync.Mutex
|
||||
submoduleList = make(map[string]string)
|
||||
|
||||
done.Lock()
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
|
||||
|
||||
LogDebug("Getting submodules for:", commitId)
|
||||
|
||||
go func() {
|
||||
defer done.Unlock()
|
||||
@@ -636,19 +803,32 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
|
||||
err = fmt.Errorf("Error parsing git commit. Err: %w", err)
|
||||
return
|
||||
}
|
||||
data_out.Write([]byte(c.Tree))
|
||||
data_out.ch <- '\x00'
|
||||
var tree GitTree
|
||||
tree, err = parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git tree: %w", err)
|
||||
return
|
||||
}
|
||||
trees := make(map[string]string)
|
||||
trees[""] = c.Tree
|
||||
|
||||
for _, te := range tree.items {
|
||||
if te.isSubmodule() {
|
||||
submoduleList[te.name] = te.hash
|
||||
for len(trees) > 0 {
|
||||
for p, tree := range trees {
|
||||
delete(trees, p)
|
||||
|
||||
data_out.Write([]byte(tree))
|
||||
data_out.ch <- '\x00'
|
||||
var tree GitTree
|
||||
tree, err = parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Error parsing git tree: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, te := range tree.items {
|
||||
if te.isTree() {
|
||||
trees[p+te.name+"/"] = te.hash
|
||||
} else if te.isSubmodule() {
|
||||
submoduleList[p+te.name] = te.hash
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
@@ -663,34 +843,32 @@ func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleLi
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
if e.DebugLogger {
|
||||
log.Println(string(data))
|
||||
}
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
LogDebug("command run:", cmd.Args)
|
||||
if e := cmd.Run(); e != nil {
|
||||
LogError(e)
|
||||
close(data_in.ch)
|
||||
close(data_out.ch)
|
||||
return submoduleList, e
|
||||
}
|
||||
err = cmd.Run()
|
||||
|
||||
done.Lock()
|
||||
return submoduleList, err
|
||||
}
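
// Illustrative usage sketch, not part of the change above: given a handler
// obtained elsewhere (for example via ReadExistingPath), the recursive
// submodule listing could be consumed roughly like this. The variable names
// here are hypothetical.
//
//	subs, err := git.GitSubmoduleList("", "HEAD")
//	if err != nil {
//		return err
//	}
//	for path, hash := range subs {
//		LogInfo("submodule", path, "->", hash)
//	}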
|
||||
|
||||
func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool) {
|
||||
data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
|
||||
data_in, data_out := ChanIO{make(chan byte)}, ChanIO{make(chan byte)}
|
||||
var wg sync.WaitGroup
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
if e.DebugLogger {
|
||||
log.Printf("getting commit id '%s' from git at '%s' with packageName: %s\n", commitId, cwd, packageName)
|
||||
}
|
||||
LogDebug("getting commit id", commitId, "from git at", cwd, "with packageName:", packageName)
|
||||
|
||||
go func() {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
subCommitId = "wrong"
|
||||
subCommitId = ""
|
||||
commitId = "ok"
|
||||
valid = false
|
||||
}
|
||||
@@ -703,14 +881,16 @@ func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string)
|
||||
data_out.ch <- '\x00'
|
||||
c, err := parseGitCommit(data_in.ch)
|
||||
if err != nil {
|
||||
log.Panicf("Error parsing git commit: %v\n", err)
|
||||
LogError("Error parsing git commit:", err)
|
||||
panic(err)
|
||||
}
|
||||
data_out.Write([]byte(c.Tree))
|
||||
data_out.ch <- '\x00'
|
||||
tree, err := parseGitTree(data_in.ch)
|
||||
|
||||
if err != nil {
|
||||
log.Panicf("Error parsing git tree: %v\n", err)
|
||||
LogError("Error parsing git tree:", err)
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, te := range tree.items {
|
||||
@@ -731,178 +911,25 @@ func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string)
|
||||
cmd.Stdout = &data_in
|
||||
cmd.Stdin = &data_out
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
log.Println(string(data))
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
if err := cmd.Run(); err != nil {
|
||||
log.Printf("Error running command %v, err: %v", cmd.Args, err)
|
||||
LogDebug("command run:", cmd.Args)
|
||||
if e := cmd.Run(); e != nil {
|
||||
LogError(e)
|
||||
close(data_in.ch)
|
||||
close(data_out.ch)
|
||||
return subCommitId, false
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
return subCommitId, len(subCommitId) == len(commitId)
|
||||
return subCommitId, len(subCommitId) > 0
|
||||
}
|
||||
|
||||
const (
	GitStatus_Untracked = 0
	GitStatus_Modified  = 1
	GitStatus_Ignored   = 2
	GitStatus_Unmerged  = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
	GitStatus_Renamed   = 4 // orig name in States[0]
)

type GitStatusData struct {
	Path   string
	Status int
	States [3]string
}
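
// Illustrative sketch, not part of the change above: an unmerged
// `git status --porcelain=2 -z` record for .gitmodules is expected to map
// to a value shaped roughly like this (stage object IDs are placeholders;
// see parseSingleStatusEntry and the tests below).
//
//	GitStatusData{
//		Path:   ".gitmodules",
//		Status: GitStatus_Unmerged,
//		States: [3]string{"<stage1 oid>", "<stage2 oid>", "<stage3 oid>"},
//	}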

func parseGitStatusHexString(data io.ByteReader) (string, error) {
	str := make([]byte, 0, 32)
	for {
		c, err := data.ReadByte()
		if err != nil {
			return "", err
		}
		switch {
		case c == 0 || c == ' ':
			return string(str), nil
		case c >= 'a' && c <= 'f':
		case c >= 'A' && c <= 'F':
		case c >= '0' && c <= '9':
		default:
			return "", errors.New("Invalid character in hex string: " + string(c))
		}
		str = append(str, c)
	}
}

func parseGitStatusString(data io.ByteReader) (string, error) {
	str := make([]byte, 0, 100)
	for {
		c, err := data.ReadByte()
		if err != nil {
			return "", errors.New("Unexpected EOF. Expected NUL string term")
		}
		if c == 0 {
			return string(str), nil
		}
		str = append(str, c)
	}
}

func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
	for skipSpaceLen > 0 {
		c, err := data.ReadByte()
		if err != nil {
			return err
		}
		if c == ' ' {
			skipSpaceLen--
		}
	}

	return nil
}

func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
	ret := GitStatusData{}
	statusType, err := data.ReadByte()
	if err != nil {
		return nil, nil
	}
	switch statusType {
	case '1':
		var err error
		if err = skipGitStatusEntry(data, 8); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Modified
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '2':
		var err error
		if err = skipGitStatusEntry(data, 9); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Renamed
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
		ret.States[0], err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '?':
		var err error
		if err = skipGitStatusEntry(data, 1); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Untracked
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '!':
		var err error
		if err = skipGitStatusEntry(data, 1); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Ignored
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case 'u':
		var err error
		if err = skipGitStatusEntry(data, 7); err != nil {
			return nil, err
		}
		if ret.States[0], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		if ret.States[1], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		if ret.States[2], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Unmerged
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	default:
		return nil, errors.New("Invalid status type: " + string(statusType))
	}
	return &ret, nil
}
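
// Illustrative sketch, not part of the change above: a porcelain=2 rename
// record ("2 R. ... R100 newname\x00oldname\x00") should yield the new path
// in Path and the original name in States[0], matching the
// GitStatus_Renamed comment above, e.g.:
//
//	GitStatusData{
//		Path:   "pkgQ",
//		Status: GitStatus_Renamed,
//		States: [3]string{"pkgC"},
//	}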

func parseGitStatusData(data io.ByteReader) ([]GitStatusData, error) {
	ret := make([]GitStatusData, 0, 10)
	for {
		data, err := parseSingleStatusEntry(data)
		if err != nil {
			return nil, err
		} else if data == nil {
			break
		}

		ret = append(ret, *data)
	}
	return ret, nil
}
|
||||
|
||||
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
|
||||
if e.DebugLogger {
|
||||
log.Println("getting git-status()")
|
||||
}
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "status", "--porcelain=2", "-z")
|
||||
func (e *GitHandlerImpl) GitExecWithDataParse(cwd string, dataprocessor func(io.ByteReader) (Data, error), gitcmd string, args ...string) (Data, error) {
|
||||
LogDebug("getting", gitcmd)
|
||||
args = append([]string{gitcmd}, args...)
|
||||
cmd := exec.Command("/usr/bin/git", args...)
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
@@ -910,16 +937,133 @@ func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error)
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
log.Println(string(data))
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
if e.DebugLogger {
|
||||
log.Printf("command run: %v\n", cmd.Args)
|
||||
}
|
||||
LogDebug("command run:", cmd.Args)
|
||||
out, err := cmd.Output()
|
||||
if err != nil {
|
||||
log.Printf("Error running command %v, err: %v", cmd.Args, err)
|
||||
LogError("Error running command", cmd.Args, err)
|
||||
}
|
||||
|
||||
return parseGitStatusData(bufio.NewReader(bytes.NewReader(out)))
|
||||
return dataprocessor(bytes.NewReader(out))
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
|
||||
data, err := e.GitExecWithDataParse(cwd, parseGitStatusData, "status", "--porcelain=2", "-z")
|
||||
return data.([]GitStatusData), err
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitDiff(cwd, base, head string) (string, error) {
|
||||
LogDebug("getting diff from", base, "..", head)
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "diff", base+".."+head)
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
}
|
||||
cmd.Dir = filepath.Join(e.GitPath, cwd)
|
||||
cmd.Stderr = writeFunc(func(data []byte) (int, error) {
|
||||
LogError(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
LogDebug("command run:", cmd.Args)
|
||||
out, err := cmd.Output()
|
||||
if err != nil {
|
||||
LogError("Error running command", cmd.Args, err)
|
||||
}
|
||||
|
||||
return string(out), nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitDiffIndex(cwd, commit string) ([]GitDiffRawData, error) {
|
||||
	data, err := e.GitExecWithDataParse(cwd, parseGitDiffIndexRawData, "diff-index", "-z", "--raw", "--full-index", "--submodule=short", "HEAD")
|
||||
return data.([]GitDiffRawData), err
|
||||
}
|
||||
|
||||
func (git *GitHandlerImpl) GitResolveSubmoduleFileConflict(cwd string) error {
|
||||
status, err := git.GitStatus(cwd)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Status failed: %w", err)
|
||||
}
|
||||
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged {
|
||||
if s.Path != ".gitmodules" {
|
||||
return err
|
||||
}
|
||||
|
||||
submodules, err := git.GitSubmoduleList(cwd, "HEAD")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
|
||||
// We need to adjust the `submodules` list by the pending changes to the index
|
||||
|
||||
			s1, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[0])
			if err != nil {
				return fmt.Errorf("Failed fetching data during .gitmodules merge resolution: %w", err)
			}
			s2, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[1])
			if err != nil {
				return fmt.Errorf("Failed fetching data during .gitmodules merge resolution: %w", err)
			}
			s3, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[2])
			if err != nil {
				return fmt.Errorf("Failed fetching data during .gitmodules merge resolution: %w", err)
			}

			subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
			if err != nil {
				return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
			}
			subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
			if err != nil {
				return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[1], err)
			}
			subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
			if err != nil {
				return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[2], err)
			}
|
||||
|
||||
for r := range submodules {
|
||||
LogError(r)
|
||||
}
|
||||
|
||||
			// merge from subs3 (target), subs1 (orig), subs2 (2nd base that is missing from target base)
|
||||
// this will update submodules
|
||||
mergedSubs := slices.Concat(subs1, subs2, subs3)
|
||||
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
|
||||
nextSub:
|
||||
for subName := range submodules {
|
||||
|
||||
for i := range mergedSubs {
|
||||
if path.Base(mergedSubs[i].Path) == subName {
|
||||
filteredSubs = append(filteredSubs, mergedSubs[i])
|
||||
continue nextSub
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("Cannot find submodule for path: %s", subName)
|
||||
}
|
||||
|
||||
out, err := os.Create(path.Join(git.GetPath(), cwd, ".gitmodules"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
|
||||
}
|
||||
if err = WriteSubmodules(filteredSubs, out); err != nil {
|
||||
return fmt.Errorf("Can't write .gitmodules: %w", err)
|
||||
}
|
||||
			if err = out.Close(); err != nil {
|
||||
return fmt.Errorf("Can't close .gitmodules: %w", err)
|
||||
}
|
||||
|
||||
git.GitExecOrPanic(cwd, "add", ".gitmodules")
|
||||
git.GitExecOrPanic(cwd, "-c", "core.editor=true", "merge", "--continue")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
@@ -24,11 +24,163 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGitClone(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
|
||||
repo string
|
||||
branch string
|
||||
remoteName string
|
||||
remoteUrl string
|
||||
}{
|
||||
{
|
||||
name: "Basic clone",
|
||||
repo: "pkgAclone",
|
||||
branch: "main",
|
||||
remoteName: "pkgA_main",
|
||||
remoteUrl: "/pkgA",
|
||||
},
|
||||
{
|
||||
name: "Remote branch is non-existent",
|
||||
repo: "pkgAclone",
|
||||
branch: "main_not_here",
|
||||
remoteName: "pkgA_main",
|
||||
remoteUrl: "/pkgA",
|
||||
},
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
execPath, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
d := t.TempDir()
|
||||
os.Chdir(d)
|
||||
defer os.Chdir(execPath)
|
||||
cmd := exec.Command(path.Join(execPath, "test_clone_setup.sh"))
|
||||
if _, err := cmd.Output(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(d, "Test", "test@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
g, err := gh.CreateGitHandler("org")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if _, err := g.GitClone(test.repo, test.branch, "file://"+d+test.remoteUrl); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
id, err := g.GitBranchHead(test.repo, test.branch)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Fatal(id)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSubmoduleConflictResolution(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
checkout, merge string
|
||||
result string
|
||||
}{
|
||||
{
|
||||
name: "adding two submodules",
|
||||
checkout: "base_add_b1",
|
||||
merge: "base_add_b2",
|
||||
result: `[submodule "pkgA"]
|
||||
path = pkgA
|
||||
url = ../pkgA
|
||||
[submodule "pkgB"]
|
||||
path = pkgB
|
||||
url = ../pkgB
|
||||
[submodule "pkgC"]
|
||||
path = pkgC
|
||||
url = ../pkgC
|
||||
[submodule "pkgB1"]
|
||||
path = pkgB1
|
||||
url = ../pkgB1
|
||||
[submodule "pkgB2"]
|
||||
path = pkgB2
|
||||
url = ../pkgB2
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
d, err := os.MkdirTemp(os.TempDir(), "submoduletests")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cmd := exec.Command(cwd + "/test_repo_setup.sh")
|
||||
cmd.Dir = d
|
||||
_, err = cmd.Output()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(d, "test", "foo@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
success := true
|
||||
noErrorOrFail := func(t *testing.T, err error) {
|
||||
if err != nil {
|
||||
t.Fatal(string(debug.Stack()), err)
|
||||
}
|
||||
}
|
||||
for _, test := range tests {
|
||||
success = t.Run(test.name, func(t *testing.T) {
|
||||
git, err := gh.ReadExistingPath("prjgit")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
noErrorOrFail(t, git.GitExec("", "checkout", "-B", "test", "main"))
|
||||
noErrorOrFail(t, git.GitExec("", "merge", "base_add_b1"))
|
||||
err = git.GitExec("", "merge", "base_add_b2")
|
||||
if err == nil {
|
||||
t.Fatal("expected a conflict")
|
||||
}
|
||||
err = git.GitResolveSubmoduleFileConflict("")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
data, err := os.ReadFile(git.GetPath() + "/.gitmodules")
|
||||
if err != nil {
|
||||
t.Fatal("Cannot read .gitmodules.", err)
|
||||
}
|
||||
|
||||
if string(data) != test.result {
|
||||
t.Error("Expected", test.result, "but have", string(data))
|
||||
}
|
||||
}) && success
|
||||
}
|
||||
|
||||
if success {
|
||||
os.RemoveAll(d)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitMsgParsing(t *testing.T) {
|
||||
t.Run("tree message with size 56", func(t *testing.T) {
|
||||
const hdr = "f40888ea4515fe2e8eea617a16f5f50a45f652d894de3ad181d58de3aafb8f98 tree 56\x00"
|
||||
@@ -136,7 +288,7 @@ committer Adam Majer <amajer@suse.com> 1720709149 +0200
|
||||
})
|
||||
|
||||
t.Run("parse multiline headers", func(t *testing.T) {
|
||||
const commitData = "cae5831ab48470ff060a5aaa12eb6e5a7acaf91e commit 1491\x00" +
|
||||
const commitData = "cae5831ab48470ff060a5aaa12eb6e5a7acaf91e commit 1492\000" +
|
||||
`tree 1f9c8fe8099615d6d3921528402ac53f09213b02
|
||||
parent e08a654fae0ecc91678819e0b62a2e014bad3339
|
||||
author Yagiz Nizipli <yagiz@nizipli.com> 1720967314 -0400
|
||||
@@ -168,7 +320,7 @@ Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
|
||||
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
|
||||
Reviewed-By: Ulises Gascón <ulisesgascongonzalez@gmail.com>
|
||||
Reviewed-By: Richard Lau <rlau@redhat.com>
|
||||
Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\000"
|
||||
|
||||
ch := make(chan byte, 5000)
|
||||
for _, b := range []byte(commitData) {
|
||||
@@ -189,6 +341,51 @@ Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse multiline headers", func(t *testing.T) {
|
||||
const commitData = "c07c52c57a10fb355956df3caad2986613838f149274fbe312ad76560764829d commit 1150\000" + `tree 3e06b280ea056141ed5e8af9794a41ae5281930c45321803eab53a240cb60044
|
||||
parent 19362a2cecb1fd25a89e03611d08ac68dcb1732f9dc0a68a40926356787fa4ca
|
||||
author Adrian Schröter <adrian@suse.de> 1746600403 +0200
|
||||
committer Adrian Schröter <adrian@suse.de> 1746600403 +0200
|
||||
gpgsig-sha256 -----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAABCgAdFiEE1QF1zm/pNbvyhgLFkY2MlUwI22cFAmgbAd0ACgkQkY2MlUwI
|
||||
22dxtA//eUCzIqxVdaEnOrFeTyxKig/mCOjaAyctmwr0vXUyElRtjXe4TzVG3QtR
|
||||
uDfhIrKYLZ2tU/0TewTW/4XopWxLuqEzVQLrjuYl7K5P3GoYk52W1yGT0szzm7/i
|
||||
87j4UdRL9YGU/gYO7nSzstcfTP6AcmYzVUoOnwYR0K2vyOVjO4niL3mFXxLkIgIt
|
||||
jd82xcE4JpQz9Yjyq2nDdz4A55kLAwsqY+dOct4oC6bZmj1/JeoGQfPvUsvsQgcI
|
||||
syCHVh0GBxjvSv50V/VPzxQTFMal/TdtvAD4kmP/9RDi/5THzus8Peam8pV0gEIC
|
||||
Q15ZcuLwIsC9i7ifUDYgzLgBBRdpSI0qji4Y6clWULPVjsyghgyfQw1trBSySpC8
|
||||
O1XfajUM+rXyrBLP6kzY+zl/zyzRdJ8JhljmC+SmNuyyEB77Hkn83k0f+aBhhqC2
|
||||
4b3fIsKtwJZ1w6gr6SSz1BottiT9ShQzRaL8iRoF/2l5MkHPR+QFg2J7EIBqCbCQ
|
||||
hFUjdvWAXQBWkkTQlJmLmJBXDOLQg3o6xCbnZM0gPFjZWE7e3Mpky7H0+xPnoeg9
|
||||
ukuvkexXQ6yrdiekA7HRLc76Te/I0m7KDOOWZ3rbJV6uH/3ps4FbLQTZO12AtZ6J
|
||||
n8hYdYfw9yjCxiKUjnEtXtDRe8DJpqv+hO0Wj4MI5gIA2JE2lzY=
|
||||
=Keg5
|
||||
-----END PGP SIGNATURE-----
|
||||
|
||||
dummy change, don't merge
|
||||
` + "\000"
|
||||
ch := make(chan byte)
|
||||
go func() {
|
||||
for _, b := range []byte(commitData) {
|
||||
ch <- b
|
||||
}
|
||||
}()
|
||||
commit, err := parseGitCommit(ch)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if commit.Tree != "3e06b280ea056141ed5e8af9794a41ae5281930c45321803eab53a240cb60044" {
|
||||
t.Errorf("Invalid commit object: %#v", commit)
|
||||
}
|
||||
|
||||
if commit.Msg != "dummy change, don't merge\n" {
|
||||
t.Errorf("Invalid commit msg: '%s'", commit.Msg)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse tree object", func(t *testing.T) {
|
||||
const treeData = "\x31\x61\x30\x35\x64\x62\x37\x33\x36\x39\x33\x37\x34\x33\x30\x65\x31\x38\x64\x66\x34\x33\x61\x32\x37\x61\x39\x38\x30\x30\x31\x30\x31\x32\x65\x31\x65\x64\x32\x30\x34\x38\x32\x39\x38\x36\x37\x31\x32\x38\x66\x32\x63\x65\x38\x34\x30\x36\x62\x35\x63\x66\x63\x39\x20\x74\x72\x65\x65\x20\x32\x30\x35\x00\x34\x30\x30\x30\x30\x20\x62\x6f\x74\x73\x2d\x63\x6f\x6d\x6d\x6f\x6e\x00\x93\x17\xaa\x47\xf6\xea\x37\xe8\xbc\xe2\x80\x77\x57\x90\xf4\xa8\x01\xd7\xe3\x70\x2f\x84\xfb\xe1\xb0\x0e\x4a\x2c\x1c\x75\x2c\x2b\x34\x30\x30\x30\x30\x20\x6f\x62\x73\x2d\x73\x74\x61\x67\x69\x6e\x67\x2d\x62\x6f\x74\x00\x79\x77\x8b\x28\x7d\x37\x10\x59\xb9\x71\x28\x36\xed\x20\x31\x5f\xfb\xe1\xed\xb5\xba\x4f\x5e\xbb\x65\x65\x68\x23\x77\x32\x58\xfe\x34\x30\x30\x30\x30\x20\x70\x72\x2d\x72\x65\x76\x69\x65\x77\x00\x36\x0d\x45\xcb\x76\xb8\x93\xb3\x21\xba\xfa\xd5\x00\x9d\xfc\x59\xab\x88\xc1\x3c\x81\xcb\x48\x5a\xe0\x29\x29\x0f\xe3\x6b\x3c\x5e\x34\x30\x30\x30\x30\x20\x70\x72\x6a\x67\x69\x74\x2d\x75\x70\x64\x61\x74\x65\x72\x00\xb4\x0b\x1c\xf5\xfb\xec\x9a\xb2\x9f\x48\x3e\x21\x18\x0d\x51\xb7\x98\x6e\x21\x99\x74\x84\x67\x71\x41\x24\x42\xfc\xc9\x04\x12\x99\x00"
|
||||
|
||||
@@ -243,9 +440,36 @@ Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
t.Error("expected submodule not found")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse nested trees with subtrees", func(t *testing.T) {
|
||||
const data = "873a323b262ebb3bd77b2592b2e11bdd08dbc721cbf4ac9f97637e58e1fffce7 tree 1083\x00100644\x20\x2Egitattributes\x00\xD8v\xA95\x87\xC1\xA9\xFCPn\xDD\xD4\x13\x9B\x8E\xD2\xCFs\xBD\x11q\x8A\xAE\x8A\x7Cg\xE2C\x14J\x01\xB0100644\x20\x2Egitignore\x00\xC3\xCD\x8En\x887\x3AJ\xA0P\xEEL\xD4\xF5\xD2v\x9C\xA6v\xC5D\x60\x40\x95\xD1\x0B\xA4\xB8\x86\xD4rE100644\x20COPYING\x00\x12\x2A\x28\xC8\xB9\x5D\x9B\x8A\x23\x1F\xE96\x07\x3F\xA9D\x90\xFD\xCE\x2Bi\x2D\x031\x5C\xCC\xC4fx\x00\xC22100644\x20README\x2Emd\x00\x92D\xF7\xFF\x0E0\x5C\xF2\xAC\x0DA\x06\x92\x0B\xD6z\x3CGh\x00y\x7EW1\xB9a\x8Ch\x215Fa100644\x20_service\x00\xC51\xF2\x12\xF3\x24\x9C\xD9\x9F\x0A\x93Mp\x12\xC1\xF7i\x05\x95\xC5Z\x06\x95i\x3Az\xC3\xF59\x7E\xF8\x1B100644\x20autogits\x2Echanges\x00\xF7\x8D\xBF\x0A\xCB\x5D\xB7y\x8C\xA9\x9C\xEB\x92\xAFd\x2C\x98\x23\x0C\x13\x13\xED\xDE\x5D\xBALD6\x3BR\x5B\xCA100644\x20autogits\x2Espec\x00\xD2\xBC\x20v\xD3\xE5F\xCA\xEE\xEA\x18\xC84\x0D\xA7\xCA\xD8O\xF2\x0A\xAB\x40\x2A\xFAL\x3B\xB4\xE6\x11\xE7o\xD140000\x20common\x00\xE2\xC9dg\xD0\x5D\xD1\xF1\x8ARW\xF0\x96\xD6\x29\x2F\x8F\xD9\xC7\x82\x1A\xB7\xAAw\xB0\xCE\xA8\xFE\xC8\xD7D\xF2100755\x20dev_test_helper\x2Esh\x00\xECY\xDD\xB3rz\x9Fh\xD4\x2E\x85\x02\x13\xF8\xFE\xB57\x8B\x1B6\x8E\x09dC\x1E\xE0\x90\x09\x08\xED\xBD_40000\x20devel\x2Dimporter\x00v\x98\x9B\x92\xD8\x24lu\xFC\xB2d\xC9\xCENb\xEE\x0F\x21\x8B\x92\x88\xDBs\xF8\x2E\xA8\xC8W\x1C\x20\xCF\xD440000\x20doc\x00\x8Akyq\xD0\xCF\xB8\x2F\x80Y\x2F\x11\xF0\x14\xA9\xFE\x96\x14\xE0W\x2C\xCF\xB9\x86\x7E\xFDi\xD7\x1F\x08Q\xFB40000\x20gitea\x2Devents\x2Drabbitmq\x2Dpublisher\x00\x5Cb\x3Fh\xA2\x06\x06\x0Cd\x09\xA5\xD9\xF7\x23\x5C\xF85\xF5\xB8\xBE\x7F\xD4O\x25t\xEF\xCC\xAB\x18\x7C\x0C\xF3100644\x20go\x2Emod\x00j\x85\x0B\x03\xC8\x9F\x9F\x0F\xC8\xE0\x8C\xF7\x3D\xC19\xF7\x12gk\xD6\x18JN\x24\xC0\x1C\xBE\x97oY\x02\x8D100644\x20go\x2Esum\x00h\x88\x2E\x27\xED\xD39\x8D\x12\x0F\x7D\x97\xA2\x5DE\xB9\x82o\x0Cu\xF4l\xA17s\x28\x2BQT\xE6\x12\x9040000\x20group\x2Dreview\x00\x7E\x7B\xB42\x0F\x3B\xC9o\x2C\xE79\x1DR\xE2\xE4i\xAE\xF6u\x90\x09\xD8\xC9c\xE7\xF7\xC7\x92\xFB\xD7\xDD140000\x20obs\x2Dstaging\x2Dbot\x00\x12\xE8\xAF\x09\xD4\x5D\x13\x8D\xC9\x0AvPDc\xB6\x7C\xAC4\xD9\xC5\xD4_\x98i\xBE2\xA7\x25aj\xE2k40000\x20obs\x2Dstatus\x2Dservice\x00MATY\xA3\xFA\xED\x05\xBE\xEB\x2B\x07\x9CN\xA9\xF3SB\x22MlV\xA4\x5D\xDA\x0B\x0F\x23\xA1\xA8z\xD740000\x20systemd\x00\x2D\xE2\x03\x7E\xBD\xEB6\x8F\xC5\x0E\x12\xD4\xBD\x97P\xDD\xA2\x92\xCE6n\x08Q\xCA\xE4\x15\x97\x8F\x26V\x3DW100644\x20vendor\x2Etar\x2Ezst\x00\xD9\x2Es\x03I\x91\x22\x24\xC86q\x91\x95\xEF\xA3\xC9\x3C\x06D\x90w\xAD\xCB\xAE\xEEu2i\xCE\x05\x09u40000\x20workflow\x2Ddirect\x00\x94\xDB\xDFc\xB5A\xD5\x16\xB3\xC3ng\x94J\xE7\x101jYF\x15Q\xE97\xCFg\x14\x12\x28\x3A\xFC\xDB40000\x20workflow\x2Dpr\x00\xC1\xD8Z9\x18\x60\xA2\xE2\xEF\xB0\xFC\xD7\x2Ah\xF07\x0D\xEC\x8A7\x7E\x1A\xAAn\x13\x9C\xEC\x05s\xE8\xBDf\x00"
|
||||
|
||||
ch := make(chan byte, 2000)
|
||||
for _, b := range []byte(data) {
|
||||
ch <- b
|
||||
}
|
||||
|
||||
tree, err := parseGitTree(ch)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
found := false
|
||||
for _, item := range tree.items {
|
||||
t.Log(item)
|
||||
if item.name == "workflow-pr" && item.hash == "c1d85a391860a2e2efb0fcd72a68f0370dec8a377e1aaa6e139cec0573e8bd66" && item.isTree() {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("expected submodule not found")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
func TestCommitTreeParsing(t *testing.T) {
|
||||
gitDir := t.TempDir()
|
||||
testDir, _ := os.Getwd()
|
||||
var commitId string
|
||||
@@ -260,11 +484,58 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(gitDir, "", "")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Run("GitCatFile commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", commitId, "help")
|
||||
if err != nil {
|
||||
t.Error("failed", err)
|
||||
}
|
||||
|
||||
if string(file) != "help\n" {
|
||||
t.Error("expected 'help\\n' but got", string(file))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("GitCatFile commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", "HEAD", "help")
|
||||
if err != nil {
|
||||
t.Error("failed", err)
|
||||
}
|
||||
|
||||
if string(file) != "help\n" {
|
||||
t.Error("expected 'help\\n' but got", string(file))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("GitCatFile bad commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", "518b468f391bf01d5d76d497d7cbecfa8b46d185714cf8745800ae18afb21afd", "help")
|
||||
if err == nil {
|
||||
t.Error("expected error, but not nothing")
|
||||
}
|
||||
|
||||
if string(file) != "" {
|
||||
t.Error("expected 'help\\n' but got", file)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("reads HEAD and parses the tree", func(t *testing.T) {
|
||||
const nodejs21 = "c678c57007d496a98bec668ae38f2c26a695f94af78012f15d044ccf066ccb41"
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
id, ok := h.GitSubmoduleCommitId("", "nodejs21", commitId)
|
||||
if !ok {
|
||||
t.Error("failed parse")
|
||||
@@ -275,9 +546,9 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("reads README.md", func(t *testing.T) {
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
data, err := h.GitCatFile("", commitId, "README.md")
|
||||
if err != nil {
|
||||
t.Errorf("failed parse: %v", err)
|
||||
@@ -288,9 +559,8 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("read HEAD", func(t *testing.T) {
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
data, err := h.GitSubmoduleList("", "HEAD")
|
||||
if err != nil {
|
||||
@@ -373,6 +643,8 @@ func TestGitStatusParse(t *testing.T) {
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Unmerged,
|
||||
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
|
||||
|
||||
SubmoduleChanges: "N...",
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -380,14 +652,13 @@ func TestGitStatusParse(t *testing.T) {
|
||||
name: "Renamed file",
|
||||
data: []byte("1 M. N... 100644 100644 100644 d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c 896cd09f36d39e782d66ae32dd5614d4f4d83fc689f132aab2dfc019a9f5b6f3 .gitmodules\x002 R. S... 160000 160000 160000 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 R100 pkgQ\x00pkgC\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "pkgQ",
|
||||
{
|
||||
Path: "pkgQ",
|
||||
Status: GitStatus_Renamed,
|
||||
States: [3]string{"pkgC"},
|
||||
|
||||
},
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
},
|