autogits/common/maintainership.go

package common

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"slices"
	"strings"

	"src.opensuse.org/autogits/common/gitea-generated/client/repository"
	"src.opensuse.org/autogits/common/gitea-generated/models"
)

//go:generate mockgen -source=maintainership.go -destination=mock/maintainership.go -typed

// MaintainershipData lists the maintainers of a project and of its individual
// packages, and decides whether a pull request has the required maintainer
// approval. The optional review groups are applied via ExpandMaintainers when
// building the maintainer lists.
type MaintainershipData interface {
	ListProjectMaintainers(OptionalGroupExpansion []*ReviewGroup) []string
	ListPackageMaintainers(Pkg string, OptionalGroupExpansion []*ReviewGroup) []string
	IsApproved(Pkg string, Reviews []*models.PullReview, Submitter string, ReviewGroups []*ReviewGroup) bool
}

const ProjectKey = ""
const ProjectFileKey = "_project"
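
// MaintainershipMap implements MaintainershipData on top of the maintainership
// JSON fetched from Gitea. Data maps a package name (or ProjectKey for the
// project itself) to its maintainer usernames. A sketch of the single-file
// layout, with made-up names for illustration:
//
//	{
//	    "": ["project-maintainer"],
//	    "somepkg": ["alice", "bob"]
//	}
//
// With the directory layout (IsDir), each package has its own file of the same
// shape that FetchPackage loads on demand, and the project maintainers live in
// the ProjectFileKey file.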
type MaintainershipMap struct {
	Data         map[string][]string
	IsDir        bool
	FetchPackage func(string) ([]byte, error)
	Raw          []byte
}

// ParseMaintainershipData parses raw maintainership JSON into a MaintainershipMap.
func ParseMaintainershipData(data []byte) (*MaintainershipMap, error) {
	maintainers := &MaintainershipMap{
		Data: make(map[string][]string),
		Raw:  data,
	}

	if err := json.Unmarshal(data, &maintainers.Data); err != nil {
		return nil, err
	}
	return maintainers, nil
}
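
// A minimal parsing sketch (the JSON blob and names are illustrative):
//
//	m, err := ParseMaintainershipData([]byte(`{"": ["admin"], "somepkg": ["alice"]}`))
//	if err == nil {
//		maintainers := m.ListPackageMaintainers("somepkg", nil) // ["alice", "admin"]
//	}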

// FetchProjectMaintainershipData reads the maintainership data of a project
// git repository, preferring the directory layout (per-package files plus
// ProjectFileKey) and falling back to a single maintainership file. If neither
// exists, an empty maintainer map is returned.
func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit, branch string) (*MaintainershipMap, error) {
	data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, ProjectFileKey)
	dir := true
	if err != nil || data == nil {
		dir = false
		if _, notFound := err.(*repository.RepoGetContentsNotFound); !notFound {
			return nil, err
		}
		LogDebug("Falling back to maintainership file")
		data, _, err = gitea.FetchMaintainershipFile(org, prjGit, branch)
		if err != nil || data == nil {
			if _, notFound := err.(*repository.RepoGetContentsNotFound); !notFound {
				return nil, err
			}
			// no maintainers
			data = []byte("{}")
		}
	}

	m, err := ParseMaintainershipData(data)
	if m != nil {
		m.IsDir = dir
		m.FetchPackage = func(pkg string) ([]byte, error) {
			data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, pkg)
			return data, err
		}
	}
	return m, err
}
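
// A minimal fetch sketch (organization, repository and branch names are
// illustrative; gitea is any GiteaMaintainershipReader implementation):
//
//	m, err := FetchProjectMaintainershipData(gitea, "myorg", "myprj-project", "main")
//	if err != nil {
//		return err
//	}
//	projectMaintainers := m.ListProjectMaintainers(nil)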

// ListProjectMaintainers returns the project-level maintainers, optionally
// expanded through the given review groups. It is safe to call on a nil map.
func (data *MaintainershipMap) ListProjectMaintainers(groups []*ReviewGroup) []string {
	if data == nil {
		return nil
	}
	m, found := data.Data[ProjectKey]
	if !found {
		return nil
	}

	// expand groups
	for _, g := range groups {
		m = g.ExpandMaintainers(m)
	}
	return m
}

// parsePkgDirData extracts the maintainer list for pkg from a per-package
// maintainership file.
func parsePkgDirData(pkg string, data []byte) []string {
	m := make(map[string][]string)
	if err := json.Unmarshal(data, &m); err != nil {
		return nil
	}

	pkgMaintainers, found := m[pkg]
	if !found {
		return nil
	}
	return pkgMaintainers
}

// ListPackageMaintainers returns the maintainers of pkg. In the directory
// layout the per-package file is fetched and cached on first use. Project
// maintainers are appended without duplicates, and review groups, when given,
// are expanded at the end.
func (data *MaintainershipMap) ListPackageMaintainers(pkg string, groups []*ReviewGroup) []string {
	if data == nil {
		return nil
	}

	pkgMaintainers, found := data.Data[pkg]
	if !found && data.IsDir {
		pkgData, err := data.FetchPackage(pkg)
		if err == nil {
			pkgMaintainers = parsePkgDirData(pkg, pkgData)
			if len(pkgMaintainers) > 0 {
				data.Data[pkg] = pkgMaintainers
			}
		}
	}

	// append project maintainers that are not already package maintainers
	prjMaintainers := data.ListProjectMaintainers(nil)

prjMaintainer:
	for _, prjm := range prjMaintainers {
		for i := range pkgMaintainers {
			if pkgMaintainers[i] == prjm {
				continue prjMaintainer
			}
		}
		pkgMaintainers = append(pkgMaintainers, prjm)
	}

	// expand groups
	for _, g := range groups {
		pkgMaintainers = g.ExpandMaintainers(pkgMaintainers)
	}
	return pkgMaintainers
}

// IsApproved reports whether the pull request for pkg is approved: true when
// there are no maintainers to ask, when the submitter is a maintainer, or when
// a non-stale, approved review comes from a maintainer.
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview, submitter string, groups []*ReviewGroup) bool {
	var reviewers []string
	if pkg != ProjectKey {
		reviewers = data.ListPackageMaintainers(pkg, groups)
	} else {
		reviewers = data.ListProjectMaintainers(groups)
	}
	if len(reviewers) == 0 {
		return true
	}

	LogDebug("Looking for review by:", reviewers)
	if slices.Contains(reviewers, submitter) {
		LogDebug("Submitter is maintainer. Approving.")
		return true
	}

	for _, review := range reviews {
		if !review.Stale && review.State == ReviewStateApproved && slices.Contains(reviewers, review.User.UserName) {
			LogDebug("Reviewed by", review.User.UserName)
			return true
		}
	}
	return false
}
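
// A usage sketch (package name and submitter are illustrative; the reviews
// would come from the Gitea pull request review API):
//
//	if m.IsApproved("somepkg", reviews, submitter, reviewGroups) {
//		// all required maintainer reviews are in place
//	}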

// modifyInplace rewrites the original raw JSON, re-marshalling only the values
// of keys whose maintainer lists changed and appending new keys just before
// the closing brace, so that untouched entries keep their original formatting.
func (data *MaintainershipMap) modifyInplace(writer io.StringWriter) error {
	var original map[string][]string
	if err := json.Unmarshal(data.Raw, &original); err != nil {
		return err
	}

	// Scan the raw JSON and record the byte range of every top-level value.
	dec := json.NewDecoder(bytes.NewReader(data.Raw))
	if _, err := dec.Token(); err != nil {
		return err
	}

	type entry struct {
		key      string
		valStart int
		valEnd   int
	}
	var entries []entry
	for dec.More() {
		kToken, _ := dec.Token()
		key := kToken.(string)

		var raw json.RawMessage
		if err := dec.Decode(&raw); err != nil {
			return err
		}
		valEnd := int(dec.InputOffset())
		valStart := valEnd - len(raw)
		entries = append(entries, entry{key, valStart, valEnd})
	}

	// Determine which keys were changed, added or removed.
	changed := make(map[string]bool)
	for k, v := range data.Data {
		if ov, ok := original[k]; !ok || !slices.Equal(v, ov) {
			changed[k] = true
		}
	}
	for k := range original {
		if _, ok := data.Data[k]; !ok {
			changed[k] = true
		}
	}
	if len(changed) == 0 {
		_, err := writer.WriteString(string(data.Raw))
		return err
	}

	output := ""
	lastPos := 0
	modified := false
	for _, e := range entries {
		if v, ok := data.Data[e.key]; ok {
			prefix := string(data.Raw[lastPos:e.valStart])
			// If the first kept entry follows a deleted one, drop the comma
			// left over from the deletion.
			if modified && strings.TrimSpace(output) == "{" {
				if commaIdx := strings.Index(prefix, ","); commaIdx != -1 {
					if quoteIdx := strings.Index(prefix, "\""); quoteIdx == -1 || commaIdx < quoteIdx {
						prefix = prefix[:commaIdx] + prefix[commaIdx+1:]
					}
				}
			}
			output += prefix
			if changed[e.key] {
				slices.Sort(v)
				newVal, _ := json.Marshal(v)
				output += string(newVal)
				modified = true
			} else {
				output += string(data.Raw[e.valStart:e.valEnd])
			}
		} else {
			// Deleted key: drop its bytes. If this is the very first entry,
			// keep the opening brace that precedes it so the output stays
			// valid JSON.
			prefix := string(data.Raw[lastPos:e.valStart])
			if idx := strings.Index(prefix, "{"); idx != -1 && strings.TrimSpace(output) == "" {
				output += prefix[:idx+1]
			}
			modified = true
		}
		lastPos = e.valEnd
	}
	output += string(data.Raw[lastPos:])

	// Handle additions (simplistic: appended at the end).
	for k, v := range data.Data {
		if _, ok := original[k]; !ok {
			slices.Sort(v)
			newVal, _ := json.Marshal(v)
			keyStr, _ := json.Marshal(k)
			// Insert before the closing brace.
			if idx := strings.LastIndex(output, "}"); idx != -1 {
				prefix := output[:idx]
				suffix := output[idx:]
				trimmedPrefix := strings.TrimRight(prefix, " \n\r\t")
				if !strings.HasSuffix(trimmedPrefix, "{") && !strings.HasSuffix(trimmedPrefix, ",") {
					// Find the last non-whitespace character in prefix and add
					// a separating comma after it.
					lastCharIdx := strings.LastIndexAny(prefix, "]}0123456789\"")
					if lastCharIdx != -1 {
						prefix = prefix[:lastCharIdx+1] + "," + prefix[lastCharIdx+1:]
					}
				}
				insertion := fmt.Sprintf(" %s: %s", string(keyStr), string(newVal))
				if !strings.HasSuffix(prefix, "\n") {
					insertion = "\n" + insertion
				}
				output = prefix + insertion + "\n" + suffix
				modified = true
			}
		}
	}

	if modified {
		_, err := writer.WriteString(output)
		return err
	}
	_, err := writer.WriteString(string(data.Raw))
	return err
}
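
// For illustration, a hypothetical before/after: given the raw input
//
//	{
//	    "pkgA": ["alice"],
//	    "pkgB": ["bob"]
//	}
//
// and Data["pkgA"] updated to ["carol", "alice"], only the "pkgA" value is
// re-marshalled (sorted to ["alice","carol"]); the surrounding bytes, including
// the formatting of the "pkgB" entry, are copied through unchanged.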

// WriteMaintainershipFile serializes the maintainership data. If the original
// raw JSON is available, it is modified in place to preserve unrelated
// formatting; otherwise the whole map is written out sorted by package name.
func (data *MaintainershipMap) WriteMaintainershipFile(writer io.StringWriter) error {
	if data.IsDir {
		return fmt.Errorf("not implemented")
	}

	if len(data.Raw) > 0 {
		if err := data.modifyInplace(writer); err == nil {
			return nil
		}
	}

	// Fallback to a full write: project maintainers first, then packages sorted by name.
	writer.WriteString("{\n")
	if d, ok := data.Data[""]; ok {
		eol := ","
		if len(data.Data) == 1 {
			eol = ""
		}
		slices.Sort(d)
		str, _ := json.Marshal(d)
		writer.WriteString(fmt.Sprintf(" \"\": %s%s\n", string(str), eol))
	}

	keys := make([]string, 0, len(data.Data))
	for pkg := range data.Data {
		if pkg == "" {
			continue
		}
		keys = append(keys, pkg)
	}
	slices.Sort(keys)

	for i, pkg := range keys {
		eol := ","
		if i == len(keys)-1 {
			eol = ""
		}
		maintainers := data.Data[pkg]
		slices.Sort(maintainers)
		pkgStr, _ := json.Marshal(pkg)
		maintainersStr, _ := json.Marshal(maintainers)
		writer.WriteString(fmt.Sprintf(" %s: %s%s\n", pkgStr, maintainersStr, eol))
	}
	writer.WriteString("}\n")
	return nil
}
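
// A round-trip sketch (package and maintainer names are illustrative):
//
//	m, _ := ParseMaintainershipData(raw)
//	m.Data["newpkg"] = []string{"carol"}
//	var sb strings.Builder
//	if err := m.WriteMaintainershipFile(&sb); err == nil {
//		updated := sb.String() // raw with "newpkg" inserted before the closing brace
//	}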