forked from git-workflow/test

Compare commits: 2 commits

| Author | SHA256 | Date |
|---|---|---|
|  | f264c1ec86 |  |
|  | 0c1d423e91 |  |
29  .gitea/workflows/test.yaml  (Normal file)
@@ -0,0 +1,29 @@
name: test-podman-compose

on:
  workflow_dispatch:
  pull_request:
    branches:
      - '**'

env:
  HOME: /root
  REPO_PATH: /workspace/${{ gitea.repository }}
  REPO_URL: https://gitea-actions-autobuild:${{ secrets.REPO_WRITE }}@$RUNNER_GITEA_DOMAIN/${{ gitea.repository }}.git

jobs:
  submodule-check:
    runs-on: linux-x86_64
    steps:
      - name: Checkout Code
        run: |
          test -n "${{ env.REPO_PATH }}" && rm -rfv "${{ env.REPO_PATH }}"/*
          git config --global --add safe.directory ${{ env.REPO_PATH }}
          git clone ${{ env.REPO_URL }} ${{ env.REPO_PATH }}

      - name: Run make local
        shell: bash
        run: |
          cd giwtf
          make test_local
21  giwtf/.gitattributes  (vendored, Normal file)
@@ -0,0 +1,21 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*.changes merge=merge-changes
20  giwtf/.gitignore  (vendored, Normal file)
@@ -0,0 +1,20 @@
gitea-data
gitea-logs
rabbitmq-data
workflow-pr-repos
__pycache__/
/workflow-pr/workflow-pr
/obs-staging-bot/obs-staging-bot
/gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher
*.obscpio
*.osc
_build.*
.pbuild
*.obscpio
*.osc
_build.*
.pbuild
*.obscpio
*.osc
_build.*
.pbuild
11  giwtf/Dockerfile  (Normal file)
@@ -0,0 +1,11 @@

FROM opensuse/tumbleweed
ENV container=podman

ENV LANG=en_US.UTF-8

RUN zypper -vvvn install podman podman-compose vim make python3-pytest python3-requests

WORKDIR /opt/project

COPY . /opt/project/
83  giwtf/Makefile  (Normal file)
@@ -0,0 +1,83 @@
# We want to be able to test in two **modes**:
# A. bots are used from official packages as defined in */Dockerfile.package
# B. bots are picked up from binaries placed in the corresponding directory (use 'make copy_binaries' to copy them from the parent directory)

# The topology is defined in the podman-compose file and can be spawned in two ways:
# 1. Privileged container (needs no additional dependencies)
# 2. podman-compose on a local machine (needs dependencies as defined in the Dockerfile)


# Typical workflow:
# A1:
# 1. clone this repo
# 2. run 'make test_package' (or 'make test' should detect this mode)
# B1:
# 1. clone workflow-pr/autogits repo with this repo as submodule
# 2. build binaries in workflow-pr/autogits
# 3. cd to the submodule
# 4. run 'make test_local' (or 'make test' should detect this mode)
# A2:
# 1. clone this repo
# 2. 'make build' - pre-build the images (recommended; otherwise there may be surprises if an image fails to build during `make up`)
# 3. 'make up' - spawns podman-compose
# 4. 'pytest -v tests/*' - run tests
# 5. 'make down' - once the containers are not needed
# B2:
# 1. clone workflow-pr/autogits with the submodule
# 2. build go binaries
# 3. cd to the submodule
# 4. Run steps 2-5 from A2


AUTO_DETECT_MODE := $(shell if test -e ../workflow-pr/workflow-pr; then echo .local; else echo .package; fi)

# try to detect mode B1, otherwise mode A1
test: GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE)
test: copy_binaries_if_needed build_container test_container

# mode A1
test_package: GIWTF_IMAGE_SUFFIX=.package
test_package: build_container test_container

# mode B1
test_local: GIWTF_IMAGE_SUFFIX=.local
test_local: build_container test_container

MODULES := gitea-events-rabbitmq-publisher obs-staging-bot workflow-pr

copy_binaries:
	for f in $(MODULES); do cp ../$$f/$$f $$f/; done

copy_binaries_if_needed:
	test .package == ""$(GIWTF_IMAGE_SUFFIX) || for f in $(MODULES); do cp ../$$f/$$f $$f/; done

# Prepare topology 1
build_container:
	podman build . -t autogits_integration

# Run tests in topology 1
test_container:
	podman run --rm --privileged -t -e GIWTF_IMAGE_SUFFIX=$(GIWTF_IMAGE_SUFFIX) autogits_integration /usr/bin/bash -c "make build && make up && sleep 15 && pytest -v tests/*"


# parse all service images from podman-compose and build them (topology 2)
build:
	podman pull docker.io/library/rabbitmq:3.13.7-management
	for i in $$(grep -A 1000 services: podman-compose.yml | grep -oE '^  [^: ]+'); do podman-compose build $$i || exit 1; done

# this will spawn prebuilt containers (topology 2)
up:
	podman-compose up -d

# tear down (topology 2)
down:
	podman-compose down

# mode A
up-bots-package:
	GIWTF_IMAGE_SUFFIX=.package podman-compose up -d

# mode B
up-bots-local:
	GIWTF_IMAGE_SUFFIX=.local podman-compose up -d
1  giwtf/clean.sh  (Executable file)
@@ -0,0 +1 @@
sudo rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/
2  giwtf/create_user.sh  (Executable file)
@@ -0,0 +1,2 @@
podman-compose exec gitea su gitea -c "gitea admin user create --username at --password test --email at@test.it --access-token"
podman-compose exec gitea su gitea -c "gitea admin user generate-access-token --username at --token-name test --scopes all --raw"
1  giwtf/gitea-events-rabbitmq-publisher/Dockerfile  (Symbolic link)
@@ -0,0 +1 @@
Dockerfile.package
13  giwtf/gitea-events-rabbitmq-publisher/Dockerfile.local  (Normal file)
@@ -0,0 +1,13 @@
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

# Copy the pre-built binary into the container
# The user will build this and place it in the same directory as this Dockerfile
COPY gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher /usr/local/bin/
COPY gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
15  giwtf/gitea-events-rabbitmq-publisher/Dockerfile.package  (Normal file)
@@ -0,0 +1,15 @@
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

RUN zypper -n in git-core curl autogits-gitea-events-rabbitmq-publisher binutils

COPY gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
12  giwtf/gitea-events-rabbitmq-publisher/entrypoint.sh  (Normal file)
@@ -0,0 +1,12 @@
#!/bin/sh
set -e

exe=$(which gitea-events-rabbitmq-publisher)
exe=${exe:-/usr/local/bin/gitea-events-rabbitmq-publisher}

package=$(rpm -qa | grep autogits-gitea-events-rabbitmq-publisher) || :

echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :

exec $exe "$@"
25  giwtf/gitea/Dockerfile  (Normal file)
@@ -0,0 +1,25 @@
FROM registry.suse.com/bci/bci-base:15.7

RUN zypper ar --repo https://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo \
    && zypper -n --gpg-auto-import-keys refresh

RUN zypper -n install \
    git \
    sqlite3 \
    curl \
    gawk \
    openssh \
    jq \
    devel_Factory_git-workflow:gitea \
    && rm -rf /var/cache/zypp/*

# Copy the minimal set of required files from the local 'container-files' directory
COPY container-files/ /

RUN chmod -R 777 /etc/gitea/conf

# Make the setup and entrypoint scripts executable
RUN chmod +x /opt/setup/setup-gitea.sh && chmod +x /opt/setup/entrypoint.sh && chmod +x /opt/setup/setup-webhook.sh && chmod +x /opt/setup/setup-dummy-data.sh

# Use the new entrypoint script to start the container
ENTRYPOINT ["/opt/setup/entrypoint.sh"]
45  giwtf/gitea/container-files/etc/gitea/conf/app.ini  (Normal file)
@@ -0,0 +1,45 @@
WORK_PATH = /var/lib/gitea

[server]
CERT_FILE = /etc/gitea/https/cert.pem
KEY_FILE = /etc/gitea/https/key.pem
STATIC_ROOT_PATH = /usr/share/gitea
APP_DATA_PATH = /var/lib/gitea/data
PPROF_DATA_PATH = /var/lib/gitea/data/tmp/pprof
PROTOCOL = http
DOMAIN = gitea-test
SSH_DOMAIN = gitea-test
ROOT_URL = http://gitea-test:3000/
HTTP_PORT = 3000
DISABLE_SSH = false
START_SSH_SERVER = true
SSH_PORT = 3022
LFS_START_SERVER = true
LFS_ALLOW_PURE_SSH = true
LFS_JWT_SECRET = 18704d478f73cc03bca8f25f1e669177f0e2ec4cb75


[lfs]
PATH = /var/lib/gitea/data/lfs

[database]
DB_TYPE = sqlite3
PATH = /var/lib/gitea/data/gitea.db

[security]
INSTALL_LOCK = true

[oauth2]
ENABLED = false

[log]
ROOT_PATH = /var/log/gitea
MODE = console, file
; Either "Trace", "Debug", "Info", "Warn", "Error" or "None", default is "Info"
LEVEL = Debug

[service]
ENABLE_BASIC_AUTHENTICATION = true

[webhook]
ALLOWED_HOST_LIST = gitea-publisher
19  giwtf/gitea/container-files/opt/setup/entrypoint.sh  (Normal file)
@@ -0,0 +1,19 @@
#!/bin/bash
set -e

# Run setup to ensure permissions, migrations, and the admin user are ready.
# The setup script is now idempotent.
/opt/setup/setup-gitea.sh

# Start the webhook setup script in the background.
# It will wait for the main Gitea process to be ready before creating the webhook.
/opt/setup/setup-webhook.sh &

echo "Starting Gitea..."

# The original systemd service ran as user 'gitea' and group 'gitea'
# with a working directory of '/var/lib/gitea'.
# We will switch to that user and run the web command.
# Using exec means Gitea will become PID 1, allowing it to receive signals correctly.
cd /var/lib/gitea
exec su -s /bin/bash gitea -c "/usr/bin/gitea web --config /etc/gitea/conf/app.ini"
207  giwtf/gitea/container-files/opt/setup/setup-dummy-data.sh  (Normal file)
@@ -0,0 +1,207 @@
#!/bin/bash
set -e

GITEA_URL=$1
TOKEN=$2
ADMIN_USER="admin"

create_org() {
    ORG_NAME=$1
    echo "--- Checking organization: $ORG_NAME ---"
    # Check if org exists
    HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X GET -H "Authorization: token $TOKEN" "$GITEA_URL/api/v1/orgs/$ORG_NAME")

    if [ "$HTTP_STATUS" -eq 200 ]; then
        echo "Organization '$ORG_NAME' already exists."
    else
        echo "Creating organization '$ORG_NAME'..."
        curl -s -X POST "$GITEA_URL/api/v1/orgs" \
            -H "Authorization: token $TOKEN" \
            -H "Content-Type: application/json" \
            -d "{
                \"username\": \"$ORG_NAME\",
                \"full_name\": \"$ORG_NAME\"
            }"
        echo -e "\nOrganization '$ORG_NAME' created."
    fi
}

create_repo() {
    ORG_NAME=$1
    REPO_NAME=$2
    echo "--- Checking repository: $ORG_NAME/$REPO_NAME ---"

    # Check if repo exists
    HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X GET -H "Authorization: token $TOKEN" "$GITEA_URL/api/v1/repos/$ORG_NAME/$REPO_NAME")

    if [ "$HTTP_STATUS" -eq 200 ]; then
        echo "Repository '$ORG_NAME/$REPO_NAME' already exists."
    else
        echo "Creating repository '$ORG_NAME/$REPO_NAME'..."
        # Repos must be created by a user; we create it in the org context
        curl -s -X POST "$GITEA_URL/api/v1/orgs/$ORG_NAME/repos" \
            -H "Authorization: token $TOKEN" \
            -H "Content-Type: application/json" \
            -d "{
                \"name\": \"$REPO_NAME\",
                \"auto_init\": true,
                \"default_branch\": \"main\",
                \"gitignores\": \"Go\",
                \"license\": \"MIT\",
                \"private\": false,
                \"readme\": \"Default\"
            }"
        echo -e "\nRepository '$ORG_NAME/$REPO_NAME' created with a README."
    fi
}

add_collaborator() {
    ORG_NAME=$1
    REPO_NAME=$2
    COLLABORATOR=$3
    PERMISSION=$4
    echo "--- Adding $COLLABORATOR as a collaborator to $ORG_NAME/$REPO_NAME with '$PERMISSION' permission ---"

    # Note: The API returns 204 No Content on success and doesn't fail if the collaborator is already present.
    curl -s -o /dev/null -X PUT "$GITEA_URL/api/v1/repos/$ORG_NAME/$REPO_NAME/collaborators/$COLLABORATOR" \
        -H "Authorization: token $TOKEN" \
        -H "Content-Type: application/json" \
        -d "{
            \"permission\": \"$PERMISSION\"
        }"

    echo "Attempted to add $COLLABORATOR to $ORG_NAME/$REPO_NAME."
}

echo "--- Starting Dummy Data Setup ---"

create_org "products"
create_org "pool"

create_repo "products" "SLFO"
create_repo "pool" "pkgA"
create_repo "pool" "pkgB"

add_submodules() {
    echo "--- Adding submodules to products/SLFO using diffpatch ---"

    PARENT_REPO_URL="$GITEA_URL/api/v1/repos/products/SLFO"

    # Check if .gitmodules already exists to prevent re-adding
    HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X GET -H "Authorization: token $TOKEN" "$PARENT_REPO_URL/contents/.gitmodules")
    if [ "$HTTP_STATUS" -eq 200 ]; then
        echo "Submodules appear to be already added. Skipping."
        return
    fi

    # Get latest commit SHAs for the submodules
    PKG_A_SHA=$(curl -s -X GET -H "Authorization: token $TOKEN" "$GITEA_URL/api/v1/repos/pool/pkgA/branches/main" | jq -r .commit.id)
    PKG_B_SHA=$(curl -s -X GET -H "Authorization: token $TOKEN" "$GITEA_URL/api/v1/repos/pool/pkgB/branches/main" | jq -r .commit.id)

    if [ -z "$PKG_A_SHA" ] || [ "$PKG_A_SHA" = "null" ] || [ -z "$PKG_B_SHA" ] || [ "$PKG_B_SHA" = "null" ]; then
        echo "Error: Could not get submodule commit SHAs. Cannot apply patch."
        return
    fi

    # Construct the diff, ensuring newlines are preserved
    DIFF_CONTENT="diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..f1838bd
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule \"pkgA\"]
+ path = pkgA
+ url = ../../pool/pkgA.git
+[submodule \"pkgB\"]
+ path = pkgB
+ url = ../../pool/pkgB.git
diff --git a/pkgA b/pkgA
new file mode 160000
index 0000000..$PKG_A_SHA
--- /dev/null
+++ b/pkgA
@@ -0,0 +1 @@
+Subproject commit $PKG_A_SHA
diff --git a/pkgB b/pkgB
new file mode 160000
index 0000000..$PKG_B_SHA
--- /dev/null
+++ b/pkgB
@@ -0,0 +1 @@
+Subproject commit $PKG_B_SHA
diff --git a/workflow.config b/workflow.config
new file mode 100644
--- /dev/null
+++ b/workflow.config
@@ -0,0 +1,8 @@
+{
+ \"Workflows\": [\"pr\"],
+ \"GitProjectName\": \"products/SLFO#main\",
+ \"Organization\": \"pool\",
+ \"Branch\": \"main\",
+ \"ManualMergeProject\": true,
+ \"Reviewers\": [ \"-autogits_obs_staging_bot\" ]
+}
diff --git a/staging.config b/staging.config
new file mode 100644
--- /dev/null
+++ b/staging.config
@@ -0,0 +1,4 @@
+{
+ \"ObsProject\": \"openSUSE:Leap:16.0\",
+ \"StagingProject\": \"openSUSE:Leap:16.0:PullRequest\"
+}
"

    # Prepare the JSON payload for the diffpatch API
    PATCH_PAYLOAD=$(jq -n \
        --arg branch "main" \
        --arg content "$DIFF_CONTENT" \
        --arg message "Add pkgA and pkgB as submodules" \
        '{
            "branch": $branch,
            "content": $content,
            "message": $message
        }')

    # Apply the patch
    echo "Applying submodule patch to products/SLFO..."
    curl -s -X POST -H "Authorization: token $TOKEN" -H "Content-Type: application/json" \
        "$PARENT_REPO_URL/diffpatch" \
        -d "$PATCH_PAYLOAD"

    echo -e "\nSubmodule patch applied."
}

add_submodules

add_collaborator "products" "SLFO" "autogits_obs_staging_bot" "write"

add_collaborator "products" "SLFO" "workflow-pr" "write"
add_collaborator "pool" "pkgA" "workflow-pr" "write"
add_collaborator "pool" "pkgB" "workflow-pr" "write"

update_repo_settings() {
    ORG_NAME=$1
    REPO_NAME=$2
    echo "--- Updating repository settings for: $ORG_NAME/$REPO_NAME ---"

    # Workaround for Gitea API bug: GET the repo data first, then PATCH it back with modifications.
    REPO_DATA=$(curl -s -X GET -H "Authorization: token $TOKEN" "$GITEA_URL/api/v1/repos/$ORG_NAME/$REPO_NAME")

    UPDATED_DATA=$(echo "$REPO_DATA" | jq '.allow_manual_merge = true | .autodetect_manual_merge = true')

    curl -s -X PATCH "$GITEA_URL/api/v1/repos/$ORG_NAME/$REPO_NAME" \
        -H "Authorization: token $TOKEN" \
        -H "Content-Type: application/json" \
        -d "$UPDATED_DATA"

    echo -e "\nRepository settings for '$ORG_NAME/$REPO_NAME' updated."
}

update_repo_settings "products" "SLFO"
update_repo_settings "pool" "pkgA"
update_repo_settings "pool" "pkgB"

echo "--- Dummy Data Setup Complete ---"
83  giwtf/gitea/container-files/opt/setup/setup-gitea.sh  (Normal file)
@@ -0,0 +1,83 @@
#!/bin/bash
set -x
set -e

# Set ownership on the volume mounts. This allows the 'gitea' user to write to them.
# We use -R to ensure all subdirectories (like /var/lib/gitea/data) are covered.
chown -R gitea:gitea /var/lib/gitea /var/log/gitea

# Set ownership on the config directory.
chown -R gitea:gitea /etc/gitea

# Run database migrations to initialize the sqlite3 db based on app.ini.
su -s /bin/bash gitea -c 'gitea migrate'

# Create a default admin user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
    echo "Creating admin user..."
    su -s /bin/bash gitea -c 'gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin'
else
    echo "Admin user already exists."
fi

# Generate SSH key for the admin user if it doesn't exist
SSH_KEY_DIR="/var/lib/gitea/ssh-keys"
mkdir -p "$SSH_KEY_DIR"
if [ ! -f "$SSH_KEY_DIR/id_ed25519" ]; then
    echo "Generating SSH key for admin user..."
    ssh-keygen -t ed25519 -N "" -f "$SSH_KEY_DIR/id_ed25519"
    chown -R gitea:gitea "$SSH_KEY_DIR"
    chmod 700 "$SSH_KEY_DIR"
    chmod 600 "$SSH_KEY_DIR/id_ed25519"
    chmod 644 "$SSH_KEY_DIR/id_ed25519.pub"
fi

# Create an autogits_obs_staging_bot user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
    echo "Creating autogits_obs_staging_bot user..."
    su -s /bin/bash gitea -c 'gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false'
else
    echo "autogits_obs_staging_bot user already exists."
fi

# Generate an access token for the autogits_obs_staging_bot user
BOT_TOKEN_FILE="/var/lib/gitea/autogits_obs_staging_bot.token"
if [ -f "$BOT_TOKEN_FILE" ]; then
    echo "autogits_obs_staging_bot token already exists at $BOT_TOKEN_FILE."
else
    echo "Generating autogits_obs_staging_bot token..."
    BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token")
    if [ -n "$BOT_TOKEN" ]; then
        printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
        chmod 666 "$BOT_TOKEN_FILE"
        chown gitea:gitea "$BOT_TOKEN_FILE"
        echo "autogits_obs_staging_bot token generated and saved to $BOT_TOKEN_FILE."
    else
        echo "Failed to generate autogits_obs_staging_bot token."
    fi
fi

# Create a workflow-pr user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
    echo "Creating workflow-pr user..."
    su -s /bin/bash gitea -c 'gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false'
else
    echo "workflow-pr user already exists."
fi

# Generate an access token for the workflow-pr user
BOT_TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
if [ -f "$BOT_TOKEN_FILE" ]; then
    echo "workflow-pr token already exists at $BOT_TOKEN_FILE."
else
    echo "Generating workflow-pr token..."
    BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token")
    if [ -n "$BOT_TOKEN" ]; then
        printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
        chmod 666 "$BOT_TOKEN_FILE"
        chown gitea:gitea "$BOT_TOKEN_FILE"
        echo "workflow-pr token generated and saved to $BOT_TOKEN_FILE."
    else
        echo "Failed to generate workflow-pr token."
    fi
fi
92  giwtf/gitea/container-files/opt/setup/setup-webhook.sh  (Normal file)
@@ -0,0 +1,92 @@
#!/bin/bash
set -e

GITEA_URL="http://localhost:3000"
WEBHOOK_URL="http://gitea-publisher:8002/rabbitmq-forwarder"
TOKEN_NAME="webhook-creator"

echo "Webhook setup script started in background."

# Wait 10s for the main Gitea process to start
sleep 10

# Wait for Gitea API to be ready
echo "Waiting for Gitea API at $GITEA_URL..."
while ! curl -s -f "$GITEA_URL/api/v1/version" > /dev/null; do
    echo "Gitea API not up yet, waiting 5s..."
    sleep 5
done
echo "Gitea API is up."

# The `gitea admin` command needs to be run as the gitea user.
# The -raw flag gives us the token directly.
echo "Generating or retrieving admin token..."
TOKEN_FILE="/var/lib/gitea/admin.token"

if [ -f "$TOKEN_FILE" ]; then
    TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
    echo "Admin token loaded from $TOKEN_FILE."
else
    TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t $TOKEN_NAME")
    if [ -n "$TOKEN" ]; then
        printf "%s" "$TOKEN" > "$TOKEN_FILE"
        chmod 666 "$TOKEN_FILE"
        chown gitea:gitea "$TOKEN_FILE"
        echo "Admin token generated and saved to $TOKEN_FILE."
    fi
fi

if [ -z "$TOKEN" ]; then
    echo "Failed to generate or retrieve admin token. This might be because the token already exists in Gitea but not in $TOKEN_FILE. Exiting."
    exit 1
fi

# Run the dummy data setup script
/opt/setup/setup-dummy-data.sh "$GITEA_URL" "$TOKEN"

# Add SSH key via API
PUB_KEY_FILE="/var/lib/gitea/ssh-keys/id_ed25519.pub"
if [ -f "$PUB_KEY_FILE" ]; then
    echo "Checking for existing SSH key 'bot-key'..."
    KEYS_URL="$GITEA_URL/api/v1/admin/users/workflow-pr/keys"
    EXISTING_KEYS=$(curl -s -X GET -H "Authorization: token $TOKEN" "$KEYS_URL")

    if ! echo "$EXISTING_KEYS" | grep -q "\"title\":\"bot-key\""; then
        echo "Registering SSH key 'bot-key' via API..."
        KEY_CONTENT=$(cat "$PUB_KEY_FILE")
        curl -s -X POST "$KEYS_URL" \
            -H "Authorization: token $TOKEN" \
            -H "Content-Type: application/json" \
            -d "{
                \"key\": \"$KEY_CONTENT\",
                \"read_only\": false,
                \"title\": \"bot-key\"
            }"
        echo -e "\nSSH key registered."
    else
        echo "SSH key 'bot-key' already registered."
    fi
fi

# Check if the webhook already exists
echo "Checking for existing system webhook..."
DB_PATH="/var/lib/gitea/data/gitea.db"
EXISTS=$(su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"SELECT 1 FROM webhook WHERE url = '$WEBHOOK_URL' AND is_system_webhook = 1 LIMIT 1;\"")

if [ "$EXISTS" = "1" ]; then
    echo "System webhook for $WEBHOOK_URL already exists. Exiting."
    exit 0
fi

echo "Creating Gitea system webhook for $WEBHOOK_URL via direct database INSERT..."
# The events JSON requires escaped double quotes for the sqlite3 command.
EVENTS_JSON='{\"push_only\":false,\"send_everything\":true,\"choose_events\":false,\"branch_filter\":\"*\",\"events\":{\"create\":false,\"delete\":false,\"fork\":false,\"issue_assign\":false,\"issue_comment\":false,\"issue_label\":false,\"issue_milestone\":false,\"issues\":false,\"package\":false,\"pull_request\":false,\"pull_request_assign\":false,\"pull_request_comment\":false,\"pull_request_label\":false,\"pull_request_milestone\":false,\"pull_request_review\":false,\"pull_request_review_request\":false,\"pull_request_sync\":false,\"push\":false,\"release\":false,\"repository\":false,\"status\":false,\"wiki\":false,\"workflow_job\":false,\"workflow_run\":false}}'
NOW_UNIX=$(date +%s)

INSERT_CMD="INSERT INTO webhook (repo_id, owner_id, is_system_webhook, url, http_method, content_type, events, is_active, type, meta, created_unix, updated_unix) VALUES (0, 0, 1, '$WEBHOOK_URL', 'POST', 1, '$EVENTS_JSON', 1, 'gitea', '', $NOW_UNIX, $NOW_UNIX);"

su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"$INSERT_CMD\""

echo "System webhook created successfully."

exit 0
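The script creates the *system* webhook by inserting directly into Gitea's SQLite database rather than going through the REST API. One way to sanity-check the result from the host is to list system hooks over the admin API. The sketch below is illustrative only; it assumes the `/api/v1/admin/hooks` endpoint is available in the packaged Gitea version and that the token file written above is reachable through the `./gitea-data` volume.

```python
import requests

GITEA_URL = "http://localhost:3000"

# admin.token is written by setup-webhook.sh into /var/lib/gitea,
# which podman-compose.yml maps to ./gitea-data on the host.
with open("gitea-data/admin.token") as f:
    token = f.read().strip()

# /api/v1/admin/hooks lists system/default webhooks (assumed to be
# available in the deployed Gitea version).
resp = requests.get(
    f"{GITEA_URL}/api/v1/admin/hooks",
    headers={"Authorization": f"token {token}"},
    timeout=10,
)
resp.raise_for_status()
urls = [hook.get("config", {}).get("url") for hook in resp.json()]
assert "http://gitea-publisher:8002/rabbitmq-forwarder" in urls, urls
print("system webhook present:", urls)
```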
14  giwtf/mock-obs/Dockerfile  (Normal file)
@@ -0,0 +1,14 @@
# Use a base Python image
FROM registry.suse.com/bci/python:3.11

# Set the working directory
WORKDIR /app

# Copy the server script
COPY server.py .

# Expose the port the server will run on
EXPOSE 8080

# Command to run the server
CMD ["python3", "-u", "server.py"]
18  giwtf/mock-obs/responses/GET_source_openSUSE:Leap:16.0:PullRequest__meta  (Normal file)
@@ -0,0 +1,18 @@
<project name="openSUSE:Leap:16.0:PullRequest">
  <title>Leap 16.0 PullRequest area</title>
  <description>Base project to define the pull request builds</description>
  <person userid="autogits_obs_staging_bot" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard">
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <arch>x86_64</arch>
    <arch>i586</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
59  giwtf/mock-obs/responses/GET_source_openSUSE:Leap:16.0__meta  (Normal file)
@@ -0,0 +1,59 @@
<project name="openSUSE:Leap:16.0">
  <title>openSUSE Leap 16.0 based on SLFO</title>
  <description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
  <link project="openSUSE:Backports:SLE-16.0"/>
  <scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
  <person userid="dimstar_suse" role="maintainer"/>
  <person userid="lkocman-factory" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <person userid="factory-auto" role="reviewer"/>
  <person userid="licensedigger" role="reviewer"/>
  <group groupid="autobuild-team" role="maintainer"/>
  <group groupid="factory-maintainers" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <group groupid="factory-staging" role="reviewer"/>
  <build>
    <disable repository="ports"/>
  </build>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard" rebuild="local">
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="product">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
    <path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
    <path project="openSUSE:Leap:16.0" repository="images"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="ports">
    <arch>armv7l</arch>
  </repository>
  <repository name="images">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
140  giwtf/mock-obs/server.py  (Normal file)
@@ -0,0 +1,140 @@
import http.server
import socketserver
import os
import logging
import signal
import sys
import threading
import fnmatch

PORT = 8080
RESPONSE_DIR = "/app/responses"
STATE_DIR = "/tmp/mock_obs_state"

class MockOBSHandler(http.server.SimpleHTTPRequestHandler):
    def do_GET(self):
        logging.info(f"GET request for: {self.path}")
        path_without_query = self.path.split('?')[0]

        # Check for state stored by a PUT request first
        sanitized_put_path = 'PUT' + path_without_query.replace('/', '_')
        state_file_path = os.path.join(STATE_DIR, sanitized_put_path)
        if os.path.exists(state_file_path):
            logging.info(f"Found stored PUT state for {self.path} at {state_file_path}")
            self.send_response(200)
            self.send_header("Content-type", "application/xml")
            file_size = os.path.getsize(state_file_path)
            self.send_header("Content-Length", str(file_size))
            self.end_headers()
            with open(state_file_path, 'rb') as f:
                self.wfile.write(f.read())
            return

        # If no PUT state file, fall back to the glob/exact match logic
        self.handle_request('GET')

    def do_PUT(self):
        logging.info(f"PUT request for: {self.path}")
        logging.info(f"Headers: {self.headers}")
        path_without_query = self.path.split('?')[0]

        body = b''
        if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
            logging.info("Chunked transfer encoding detected")
            while True:
                line = self.rfile.readline().strip()
                if not line:
                    break
                chunk_length = int(line, 16)
                if chunk_length == 0:
                    self.rfile.readline()
                    break
                body += self.rfile.read(chunk_length)
                self.rfile.read(2)  # Read the trailing CRLF
        else:
            content_length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(content_length)

        logging.info(f"Body: {body.decode('utf-8')}")
        sanitized_path = 'PUT' + path_without_query.replace('/', '_')
        state_file_path = os.path.join(STATE_DIR, sanitized_path)

        logging.info(f"Saving state for {self.path} to {state_file_path}")
        os.makedirs(os.path.dirname(state_file_path), exist_ok=True)
        with open(state_file_path, 'wb') as f:
            f.write(body)

        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        response_body = b"OK"
        self.send_header("Content-Length", str(len(response_body)))
        self.end_headers()
        self.wfile.write(response_body)

    def do_POST(self):
        logging.info(f"POST request for: {self.path}")
        self.handle_request('POST')

    def do_DELETE(self):
        logging.info(f"DELETE request for: {self.path}")
        self.handle_request('DELETE')

    def handle_request(self, method):
        path_without_query = self.path.split('?')[0]
        sanitized_request_path = method + path_without_query.replace('/', '_')
        logging.info(f"Handling request, looking for match for: {sanitized_request_path}")

        response_file = None
        # Check for glob match first
        if os.path.exists(RESPONSE_DIR):
            for filename in os.listdir(RESPONSE_DIR):
                if fnmatch.fnmatch(sanitized_request_path, filename):
                    response_file = os.path.join(RESPONSE_DIR, filename)
                    logging.info(f"Found matching response file (glob): {response_file}")
                    break

        # Fallback to exact match if no glob match
        if response_file is None:
            exact_file = os.path.join(RESPONSE_DIR, sanitized_request_path)
            if os.path.exists(exact_file):
                response_file = exact_file
                logging.info(f"Found matching response file (exact): {response_file}")

        if response_file:
            logging.info(f"Serving content from {response_file}")
            self.send_response(200)
            self.send_header("Content-type", "application/xml")
            file_size = os.path.getsize(response_file)
            self.send_header("Content-Length", str(file_size))
            self.end_headers()
            with open(response_file, 'rb') as f:
                self.wfile.write(f.read())
        else:
            logging.info(f"Response file not found for {sanitized_request_path}. Sending 404.")
            self.send_response(404)
            self.send_header("Content-type", "text/plain")
            body = f"Mock response not found for {sanitized_request_path}".encode('utf-8')
            self.send_header("Content-Length", str(len(body)))
            self.end_headers()
            self.wfile.write(body)

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')

    if not os.path.exists(STATE_DIR):
        logging.info(f"Creating state directory: {STATE_DIR}")
        os.makedirs(STATE_DIR)
    if not os.path.exists(RESPONSE_DIR):
        os.makedirs(RESPONSE_DIR)

    with socketserver.TCPServer(("", PORT), MockOBSHandler) as httpd:
        logging.info(f"Serving mock OBS API on port {PORT}")

        def graceful_shutdown(sig, frame):
            logging.info("Received SIGTERM, shutting down gracefully...")
            threading.Thread(target=httpd.shutdown).start()

        signal.signal(signal.SIGTERM, graceful_shutdown)

        httpd.serve_forever()
    logging.info("Server has shut down.")
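The mock's contract is that a PUT stores the raw body keyed by method plus path under STATE_DIR, and a later GET for the same path is served from that stored state before the glob/exact lookup in `/app/responses` is consulted. A minimal sketch of that round trip from the host, assuming the stack is up with port 8080 published as in podman-compose.yml; the project name used here is made up for illustration:

```python
import requests

MOCK_OBS = "http://localhost:8080"
meta = '<project name="home:illustration"><title>t</title></project>'

# PUT stores the body under /tmp/mock_obs_state inside the container...
put = requests.put(f"{MOCK_OBS}/source/home:illustration/_meta", data=meta, timeout=5)
assert put.status_code == 200 and put.text == "OK"

# ...and a later GET for the same path is answered from that stored state,
# taking precedence over any canned response file.
got = requests.get(f"{MOCK_OBS}/source/home:illustration/_meta", timeout=5)
assert got.status_code == 200 and got.text == meta

# A path that was never PUT and matches no canned response file returns 404.
missing = requests.get(f"{MOCK_OBS}/source/no-such-project/_meta", timeout=5)
assert missing.status_code == 404
```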
1  giwtf/obs-staging-bot/Dockerfile  (Symbolic link)
@@ -0,0 +1 @@
./Dockerfile.package
18  giwtf/obs-staging-bot/Dockerfile.local  (Normal file)
@@ -0,0 +1,18 @@
# Use a SLE BCI base image
FROM registry.suse.com/bci/bci-base:15.7

# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl binutils

# Copy the bot binary and its entrypoint script
COPY obs-staging-bot/obs-staging-bot /usr/local/bin/obs-staging-bot
COPY obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001

# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
19  giwtf/obs-staging-bot/Dockerfile.package  (Normal file)
@@ -0,0 +1,19 @@
# Use a SLE BCI base image
FROM registry.suse.com/bci/bci-base:15.7

RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl autogits-obs-staging-bot binutils

COPY obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001

# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
28  giwtf/obs-staging-bot/entrypoint.sh  (Normal file)
@@ -0,0 +1,28 @@
#!/bin/sh
set -e

# This script waits for the Gitea admin token to be created,
# exports it as an environment variable, and then executes the main container command.

TOKEN_FILE="/gitea-data/autogits_obs_staging_bot.token"

echo "OBS Staging Bot: Waiting for Gitea autogits_obs_staging_bot token at $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
    sleep 2
done

export GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
echo "OBS Staging Bot: GITEA_TOKEN exported."

# Execute the bot as the current user, with the required variables in its environment.
echo "OBS Staging Bot: Executing bot..."

exe=$(which obs-staging-bot)
exe=${exe:-/usr/local/bin/obs-staging-bot}

package=$(rpm -qa | grep autogits-obs-staging-bot) || :

echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :

exec $exe "$@"
136  giwtf/podman-compose.yml  (Normal file)
@@ -0,0 +1,136 @@
version: "3.8"

networks:
  gitea-network:
    driver: bridge

services:
  gitea:
    build: ./gitea
    container_name: gitea-test
    environment:
      - GITEA_WORK_DIR=/var/lib/gitea
    networks:
      - gitea-network
    ports:
      # Map the HTTP and SSH ports defined in your app.ini
      - "3000:3000"
      - "3022:3022"
    volumes:
      # Persist Gitea's data (repositories, sqlite db, etc.) to a local directory
      # The :z flag allows sharing between containers
      - ./gitea-data:/var/lib/gitea:z
      # Persist Gitea's logs to a local directory
      - ./gitea-logs:/var/log/gitea:Z
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3.13.7-management
    container_name: rabbitmq-test
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_running", "-q"]
      interval: 30s
      timeout: 30s
      retries: 3
    networks:
      - gitea-network
    ports:
      # AMQP protocol port with TLS
      - "5671:5671"
      # HTTP management UI
      - "15672:15672"
    volumes:
      # Persist RabbitMQ data
      - ./rabbitmq-data:/var/lib/rabbitmq:Z
      # Mount TLS certs
      - ./rabbitmq-config/certs:/etc/rabbitmq/certs:Z
      # Mount rabbitmq config
      - ./rabbitmq-config/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:Z
      # Mount exchange definitions
      - ./rabbitmq-config/definitions.json:/etc/rabbitmq/definitions.json:Z
    restart: unless-stopped

  gitea-publisher:
    build:
      context: .
      dockerfile: gitea-events-rabbitmq-publisher/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: gitea-publisher
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      rabbitmq:
        condition: service_healthy
    environment:
      - RABBITMQ_HOST=rabbitmq-test
      - RABBITMQ_USERNAME=gitea
      - RABBITMQ_PASSWORD=gitea
      - SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
    command: [ "-listen", "0.0.0.0:8002", "-topic-domain", "suse", "-debug" ]
    restart: unless-stopped

  workflow-pr:
    build:
      context: .
      dockerfile: workflow-pr/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: workflow-pr
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      rabbitmq:
        condition: service_healthy
    environment:
      - AMQP_USERNAME=gitea
      - AMQP_PASSWORD=gitea
      - SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
    volumes:
      - ./gitea-data:/var/lib/gitea:ro,z
      - ./workflow-pr/workflow-pr.json:/etc/workflow-pr.json:ro,z
      - ./workflow-pr-repos:/var/lib/workflow-pr/repos:Z
    command: [
      "-check-on-start",
      "-debug",
      "-gitea-url", "http://gitea-test:3000",
      "-url", "amqps://rabbitmq-test:5671",
      "-config", "/etc/workflow-pr.json",
      "-repo-path", "/var/lib/workflow-pr/repos"
    ]
    restart: unless-stopped

  mock-obs:
    build: ./mock-obs
    container_name: mock-obs
    networks:
      - gitea-network
    ports:
      - "8080:8080"
    volumes:
      - ./mock-obs/responses:/app/responses:z  # Use :z for shared SELinux label
    restart: unless-stopped

  obs-staging-bot:
    build:
      context: .
      dockerfile: obs-staging-bot/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: obs-staging-bot
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      mock-obs:
        condition: service_started
    environment:
      - OBS_USER=mock
      - OBS_PASSWORD=mock-long-password
    volumes:
      - ./gitea-data:/gitea-data:ro,z
    command:
      - "-debug"
      - "-gitea-url=http://gitea-test:3000"
      - "-obs=http://mock-obs:8080"
      - "-obs-web=http://mock-obs:8080"
    restart: unless-stopped
30  giwtf/rabbitmq-config/certs/cert.pem  (Normal file)
@@ -0,0 +1,30 @@
-----BEGIN CERTIFICATE-----
MIIFKzCCAxOgAwIBAgIUJsg/r0ZyIVxtAkrlZKOr4LvYEvMwDQYJKoZIhvcNAQEL
BQAwGDEWMBQGA1UEAwwNcmFiYml0bXEtdGVzdDAeFw0yNjAxMjQxMjQyMjNaFw0z
NjAxMjIxMjQyMjNaMBgxFjAUBgNVBAMMDXJhYmJpdG1xLXRlc3QwggIiMA0GCSqG
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC9OjTq4DgqVo0mRpS8DGRR6SFrSpb2bqnl
YI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4WyRQH5PwnXOA1Lxz51qp1
t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sbKY1+/NVJpMs6dL6NzJXG
N6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYUmm43tjjxPZ81rhCXFx/F
v1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKXOPDxrM1erBN5eCwTpJMS
4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVAsqhiUa2eeAVkFwZl9wRw
lddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hAY/V+VbF1iHGAVIz4XlUj
gC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP+1hZJbm7ePsS5Na+qYG1
LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDPXCxFXDrIcFvldmJYTyeK
7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9bBjNnWaY2z17hijnzVVz
jOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrkNt7DOCUqwBnuBqYiRZFt
A1QicHxaEQIDAQABo20wazAdBgNVHQ4EFgQU3l25Ghab2k7UhwxftZ2vZ1HO9Sow
HwYDVR0jBBgwFoAU3l25Ghab2k7UhwxftZ2vZ1HO9SowDwYDVR0TAQH/BAUwAwEB
/zAYBgNVHREEETAPgg1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3DQEBCwUAA4ICAQB9
ilcsRqIvnyN25Oh668YC/xxyeNTIaIxjMLyJaMylBRjNwo1WfbdpXToaEXgot5gK
5HGlu3OIBBwBryNAlBtf/usxzLzmkEsm1Dsn9sJNY1ZTkD8MO9yyOtLqBlqAsIse
oPVjzSdjk1fP3uyoG/ZUVAFZHZD3/9BEsftfS13oUVxo7vYz1DSyUATT/4QTYMQB
PytL6EKJ0dLyuy7rIkZVkaUi+P7GuDXj25Mi6Zkxaw2QnssSuoqy1bAMkzEyNFK5
0wlNWEY8H3jRZuAz1T4AXb9sjeCgBKZoWXgmGbzleOophdzvlq66UGAWPWYFGp8Q
4GJognovhKzSY9+3n+rMPLAXSao48SYDlyTOZeBo1DTluR5QjVd+NWbEdIsA6buQ
a6uPTSVKsulm7hyUlEZp+SsYAtVoZx3jzKKjZXjnaxOfUFWx6pTxNXvxR7pQ/8Ls
IfduGy4VjKVQdyuwCE7eVEPDK6d53WWs6itziuj7gfq8mHvZivIA65z05lTwqkvb
1WS2aht+zacqVSYyNrK+/kJA2CST3ggc1EO73lRvbfO9LJZWMdO+f/tkXH4zkfmL
A3JtJcLOWuv+ZrZvHMpKlBFNMySxE3IeGX+Ad9bGyhZvZULut95/QD7Xy4cPRZHF
R3SRn0rn/BeTly+5fkEoFk+ttah8IbwzhduPyPIxng==
-----END CERTIFICATE-----
52  giwtf/rabbitmq-config/certs/key.pem  (Normal file)
@@ -0,0 +1,52 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC9OjTq4DgqVo0m
RpS8DGRR6SFrSpb2bqnlYI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4W
yRQH5PwnXOA1Lxz51qp1t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sb
KY1+/NVJpMs6dL6NzJXGN6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYU
mm43tjjxPZ81rhCXFx/Fv1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKX
OPDxrM1erBN5eCwTpJMS4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVA
sqhiUa2eeAVkFwZl9wRwlddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hA
Y/V+VbF1iHGAVIz4XlUjgC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP
+1hZJbm7ePsS5Na+qYG1LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDP
XCxFXDrIcFvldmJYTyeK7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9
bBjNnWaY2z17hijnzVVzjOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrk
Nt7DOCUqwBnuBqYiRZFtA1QicHxaEQIDAQABAoICAA+AWvDpzNgVDouV6R3NkxNN
upXgPqUx9BuNETCtbal6i4AxR1l/zC9gwti82QTKQi2OeM74MHd8zjcqIkiyRsDP
wDNDKIfEAONTT+4LLoWEN5WNDGRZ4Nw1LrLqiVX+ULtNPXvynRJtLQa43PVL74oQ
pLBle23A1n0uNmcJ9w21B6ktysN9q+JVSCZodZpD6Jk1jus8JXgDXy/9Za2NMTV8
A5ShbYz/ETSBJCSnERz7GARW7TN6V0jS6vLTSqMQJyn0KYbHNDr7TPTL7psRuaI5
jP/cqxmx1/WKLo5k3cR3IW/cesDGQXZhMRQvNymXJkxvWMPS36lmfyZtbFNflw4Z
9OD+2RKt5jFDJjG8fYiYoYBdLiTj2Wdvo4mbRPNkTL75o65riDkDCQuZhDXFBm3s
B1aDv5y1AXrzNZ5JSikszKgbLNPYB0rI3unp6i0P1985w6dyel0MGG+ouaeiyrxS
9IgJDnE4BJ79mEzHTXtbZ/+3aGAK/Y6mU8Pz2s6/+6ccT0miievsMS+si1KESF31
WLnsMdcrJcxqcm7Ypo24G0yBJluSDKtD1cqQUGN1MKp+EEv1SCH+4csaa3ooRB0o
YveySjqxtmhVpQuY3egCOaXhPmX7lgYwoe+G4UIkUMwPn20WMg+jFxgPASdh4lqE
mzpePP7STvEZAr+rrLu1AoIBAQDmCEiKOsUTtJlX3awOIRtCkIqBxS1E6rpyjfxK
A6+zpXnE++8MhIJ07+9bPdOshGjS3JbJ+hu+IocbNg++rjRArYQnJh8/qBZ2GB2v
Ryfptsoxtk/xUsmOfchvk4tOjvDHZrJehUtGc+LzX/WUqpgtEk1Gnx7RGRuDNnqS
Q1+yU4NubHwOHPswBBXOnVtopcAHFpKhbKRFOHOwMZN99qcWVIkv4J9c6emcPMLI
I/QPIvwB6WmbLa0o3JNXlD4kPdqCgNW36KEFiW8m+4tgzF3HWYSAyIeBRFG7ouE6
yk5hiptPKhZlTmTAkQSssCXksiTw1rsspFULZSRyaaaPunvVAoIBAQDSlrKu+B2h
AJtxWy5MQDOiroqT3KDneIGXPYgH3/tiDmxy0CIEbSb5SqZ6zAmihs3dWWCmc1JH
YObRrqIxu+qVi4K+Uz8l7WBrS7DkjZjajq+y/mrZYUNRoL2q9mnNqRNan7zxWDJc
U4u2NH9P4LOz6ttE4OG9SC3/gZLoepA+ANZatu93749IT7z8ske0MVPP76jVI1Gl
D7cPIlzcBUdJgNV8UOkxeqU3+S6Jn17Tkx5qMWND/2BCN4voQ4pfGWSkbaHlMLh1
2SbVuR+HYPY3aPJeSY7MEPoc7d2SSVOcVDr2AQwSDSCCgIFZOZlawehUz9R51hK8
LlaccFWXhS9NAoIBAEFZNRJf48DXW4DErq5M5WuhmFeJZnTfohwNDhEQvwdwCQnW
8HBD7LO/veXTyKCH9SeCFyxF6z+2m181mn93Cc0d/h8JC3OQEuF1tGko88PHc+Vv
f4J1HGFohlp8NeUZYnmjSSTlBR98qIqvRhr348daHa3kYmLQmSpLfcKzdSo542qp
UwzHWuynHHLX7THrdIQO+5T0Qi6P/P2e9+GfApSra1W4oE1K/lyuPj+RRzJNo/3/
C0tUTI8BKrKEoKq3D65nX0+hvKzQAE24xD25kSKi4aucTDKC8B04BngnJOE8+SYi
NL6O6Lxz9joAyKMRoMDyn7Xs8WQNVa9TKEhImAkCggEBAMljmIm/egZIoF7thf8h
vr+rD5eL/Myf776E95wgVTVW+dtqs71r7UOmYkM48VXeeO1f1hAYZO0h/Fs2GKJb
RWGyQ1xkHBXXRsgVYJuR1kXdAqW4rNIqM8jSYdAnStOFB5849+YOJEsrEocy+TWY
fAJpbTwXm4n6hxK8BZQR8fN5tYSXQbd+/5V1vBQlInFuYuqOFPWPizrBJp1wjUFU
QvJGJON4NSo+UdaPlDPEl1jabtG7XWTfylxI5qE+RgvgKuEcfyDBUQZSntLw8Pf0
gEJJOM92pPr+mVIlICoPucfcvW4ZXkO9DgP/hLOhY8jpe5fwERBa6xvPbMC6pP/8
PFkCggEBAOLtvboBThe57QRphsKHmCtRJHmT4oZzhMYsE+5GMGYzPNWod1hSyfXn
EB8iTmAFP5r7FdC10B8mMpACXuDdi2jbmlYOTU6xNTprSKtv8r8CvorWJdsQwRsy
pZ7diSCeyi0z/sIx//ov0b3WD0E8BG/HWsFbX0p5xXpaljYEv5dK7xUiWgBW+15a
N1AeVcPiXRDwhQMVcvVOvzgwKsw+Rpls/9W4hihcBHaiMcBUDFWxJtnf4ZAGAZS3
/694MOYlmfgT/cDqF9oOsCdxM0w24kL0dcUM7zPk314ixAAfUwXaxisBhS2roJ88
HsuK9JPSK/AS0IqUtKiq4LZ9ErixYF0=
-----END PRIVATE KEY-----
35
giwtf/rabbitmq-config/definitions.json
Executable file
@@ -0,0 +1,35 @@
{
  "users": [
    {
      "name": "gitea",
      "password_hash": "5IdZmMJhNb4otX/nz9Xtmkpj9khl6+5eAmXNs/oHYwQNO3jg",
      "hashing_algorithm": "rabbit_password_hashing_sha256",
      "tags": "administrator"
    }
  ],
  "vhosts": [
    {
      "name": "/"
    }
  ],
  "permissions": [
    {
      "user": "gitea",
      "vhost": "/",
      "configure": ".*",
      "write": ".*",
      "read": ".*"
    }
  ],
  "exchanges": [
    {
      "name": "pubsub",
      "vhost": "/",
      "type": "topic",
      "durable": true,
      "auto_delete": false,
      "internal": false,
      "arguments": {}
    }
  ]
}
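The `password_hash` value above uses RabbitMQ's `rabbit_password_hashing_sha256` scheme: base64 over a 4-byte random salt concatenated with SHA-256(salt + UTF-8 password). A small Python sketch for generating or checking such entries when editing definitions.json by hand (the function names are illustrative, not part of this repo):

import base64
import hashlib
import os

def rabbitmq_password_hash(password, salt=None):
    # 4-byte random salt, then SHA-256 over salt + password bytes,
    # base64-encoded together: base64(salt || sha256(salt || password))
    salt = salt if salt is not None else os.urandom(4)
    digest = hashlib.sha256(salt + password.encode("utf-8")).digest()
    return base64.b64encode(salt + digest).decode("ascii")

def rabbitmq_password_check(password, encoded):
    # Split the stored value back into salt and digest, then recompute
    raw = base64.b64decode(encoded)
    salt, digest = raw[:4], raw[4:]
    return hashlib.sha256(salt + password.encode("utf-8")).digest() == digest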
7
giwtf/rabbitmq-config/rabbitmq.conf
Normal file
@@ -0,0 +1,7 @@
listeners.ssl.default = 5671

ssl_options.certfile = /etc/rabbitmq/certs/cert.pem
ssl_options.keyfile = /etc/rabbitmq/certs/key.pem
ssl_options.verify = verify_none
ssl_options.fail_if_no_peer_cert = false
management.load_definitions = /etc/rabbitmq/definitions.json
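Since the broker listens with TLS on 5671 but sets `verify_none`, a test client only needs a non-verifying SSL context. A minimal connection sketch, assuming the pika client library; the host name and password are placeholders for this test setup, not values from the repo:

import ssl
import pika

context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE  # mirrors ssl_options.verify = verify_none

params = pika.ConnectionParameters(
    host="rabbitmq",   # placeholder service name
    port=5671,         # listeners.ssl.default above
    credentials=pika.PlainCredentials("gitea", "<password>"),  # password is not in this repo
    ssl_options=pika.SSLOptions(context),
)
connection = pika.BlockingConnection(params)
channel = connection.channel()
# The "pubsub" topic exchange is pre-created by definitions.json; redeclaring
# with identical arguments is a no-op and doubles as a sanity check
channel.exchange_declare(exchange="pubsub", exchange_type="topic", durable=True)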
83
giwtf/test-plan.md
Normal file
@@ -0,0 +1,83 @@
# Test Plan: workflow-pr Bot

## 1. Introduction

This document outlines the test plan for the `workflow-pr` bot. The bot is responsible for synchronizing pull requests between ProjectGit and PackageGit repositories, managing reviews, and handling merges. This test plan aims to ensure the bot's functional correctness and reliability.

## 2. Scope

### In Scope

* Pull Request synchronization (creation, update, closing).
* Reviewer management (adding, re-adding, mandatory vs. advisory).
* Merge management, including `ManualMergeOnly` and `ManualMergeProject` flags.
* Configuration parsing (`workflow.config`).
* Label management (`staging/Auto`, `review/Pending`, `review/Done`).
* Maintainership and permissions handling.

### Out of Scope

* Package deletion requests (planned feature).
* Underlying infrastructure (Gitea, RabbitMQ, OBS).
* Performance and load testing.
* Closing a PackageGit PR (currently disabled).

## 3. Test Objectives

* Verify that pull requests are correctly synchronized between ProjectGit and PackageGit.
* Ensure that reviewers are correctly added to pull requests based on the configuration.
* Validate that pull requests are merged only when all conditions are met.
* Confirm that the bot correctly handles various configurations in `workflow.config`.
* Verify that labels are correctly applied to pull requests.
* Ensure that maintainership and permissions are correctly enforced.

## 4. Test Strategy

The testing will be conducted in a dedicated test environment that mimics the production environment. The strategy will involve a combination of:

* **Component Testing:** Testing individual components of the bot in isolation using unit tests written in Go.
* **Integration Testing:** Testing the bot's interaction with Gitea, RabbitMQ, and a mock OBS server using `pytest`.
* **End-to-End Testing:** Testing the complete workflow from creating a pull request to merging it using `pytest`.

### Test Automation

* **Unit Tests:** Go's built-in testing framework will be used to write unit tests for individual functions and methods.
* **Integration and End-to-End Tests:** `pytest` will be used to write integration and end-to-end tests that use the Gitea API to create pull requests and verify the bot's behavior.

### Success Metrics

* **Test Coverage:** The goal is to achieve at least 80% test coverage for the bot's codebase.
* **Bug Detection Rate:** The number of bugs found during the testing phase.
* **Test Pass Rate:** The percentage of test cases that pass without any issues.

## 5. Test Cases

| Test Case ID | Description | Steps to Reproduce | Expected Results | Priority |
| :--- | :--- | :--- | :--- | :--- |
| **TC-SYNC-001** | **Create ProjectGit PR from PackageGit PR** | 1. Create a new PR in a PackageGit repository. | 1. A new PR is created in the corresponding ProjectGit repository with the title "Forwarded PRs: <package_name>".<br>2. The ProjectGit PR description contains a link to the PackageGit PR (e.g., `PR: org/package_repo!pr_number`).<br>3. The package submodule in the ProjectGit PR points to the PackageGit PR's commit. | High |
| **TC-SYNC-002** | **Update ProjectGit PR from PackageGit PR** | 1. Push a new commit to an existing PackageGit PR. | 1. The corresponding ProjectGit PR's head branch is updated with the new commit. | High |
| **TC-SYNC-003** | **WIP Flag Synchronization** | 1. Mark a PackageGit PR as "Work In Progress".<br>2. Remove the WIP flag from the PackageGit PR. | 1. The corresponding ProjectGit PR is also marked as "Work In Progress".<br>2. The WIP flag on the ProjectGit PR is removed. | Medium |
| **TC-SYNC-004** | **WIP Flag (multiple referenced package PRs)** | 1. Create a ProjectGit PR that references multiple PackageGit PRs.<br>2. Mark one of the PackageGit PRs as "Work In Progress".<br>3. Remove the "Work In Progress" flag from all PackageGit PRs. | 1. The ProjectGit PR is marked as "Work In Progress".<br>2. The "Work In Progress" flag is removed from the ProjectGit PR only after it has been removed from all associated PackageGit PRs. | Medium |
| **TC-SYNC-005** | **NoProjectGitPR = true, edits disabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR without "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The bot adds a warning comment to the PackageGit PR explaining that it cannot update the PR. | High |
| **TC-SYNC-006** | **NoProjectGitPR = true, edits enabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR with "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The submodule commit on the project PR is updated with the new commit from the PackageGit PR. | High |
| **TC-COMMENT-001** | **Detect duplicate comments** | 1. Create a PackageGit PR.<br>2. Wait for the `workflow-pr` bot to act on the PR.<br>3. Edit the body of the PR to trigger the bot a second time. | 1. The bot should not post a duplicate comment. | High |
| **TC-REVIEW-001** | **Add mandatory reviewers** | 1. Create a new PackageGit PR. | 1. All mandatory reviewers are added to both the PackageGit and ProjectGit PRs. | High |
| **TC-REVIEW-002** | **Add advisory reviewers** | 1. Create a new PackageGit PR with advisory reviewers defined in the configuration. | 1. Advisory reviewers are added to the PR, but their approval is not required for merging. | Medium |
| **TC-REVIEW-003** | **Re-add reviewers** | 1. Push a new commit to a PackageGit PR after it has been approved. | 1. The original reviewers are re-added to the PR. | Medium |
| **TC-REVIEW-004** | **Package PR created by a maintainer** | 1. Create a PackageGit PR from the account of a package maintainer. | 1. No review is requested from other package maintainers. | High |
| **TC-REVIEW-005** | **Package PR created by an external user (approve)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers approves the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer approves the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-006** | **Package PR created by an external user (reject)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers rejects the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer rejects the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-007** | **Package PR created by a maintainer with ReviewRequired=true** | 1. Set `ReviewRequired = true` in `workflow.config`.<br>2. Create a PackageGit PR from the account of a package maintainer. | 1. A review is requested from other package maintainers if available. | High |
| **TC-MERGE-001** | **Automatic Merge** | 1. Create a PackageGit PR.<br>2. Ensure all mandatory reviews are completed on both project and package PRs. | 1. The PR is automatically merged. | High |
| **TC-MERGE-002** | **ManualMergeOnly with Package Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a package maintainer for that package. | 1. The PR is merged. | High |
| **TC-MERGE-003** | **ManualMergeOnly with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a user who is not a maintainer for that package. | 1. The PR is not merged. | High |
| **TC-MERGE-004** | **ManualMergeOnly with multiple packages** | 1. Create a ProjectGit PR that references multiple PackageGit PRs with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on each package PR from the account of a package maintainer. | 1. The PR is merged only after "merge ok" is commented on all associated PackageGit PRs. | High |
| **TC-MERGE-005** | **ManualMergeOnly with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-006** | **ManualMergeProject with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-007** | **ManualMergeProject with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a package maintainer. | 1. The PR is not merged. | High |
| **TC-CONFIG-001** | **Invalid Configuration** | 1. Provide an invalid `workflow.config` file. | 1. The bot reports an error and does not process any PRs. | High |
| **TC-LABEL-001** | **Apply `staging/Auto` label** | 1. Create a new PackageGit PR. | 1. The `staging/Auto` label is applied to the ProjectGit PR. | High |
| **TC-LABEL-002** | **Apply `review/Pending` label** | 1. Create a new PackageGit PR. | 1. The `review/Pending` label is applied to the ProjectGit PR when there are pending reviews. | Medium |
| **TC-LABEL-003** | **Apply `review/Done` label** | 1. Ensure all mandatory reviews for a PR are completed. | 1. The `review/Done` label is applied to the ProjectGit PR when all mandatory reviews are completed. | Medium |
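Several of the TC-MERGE cases above hinge on posting a literal "merge ok" comment. A sketch of how a future test could drive that trigger through the Gitea comment API (the helper name is illustrative; the endpoint matches the one the test suite below already reads from):

import requests

def post_merge_ok(gitea_url, token, repo, pr_number):
    # POST a "merge ok" comment on the PR's issue thread via the Gitea API
    url = f"{gitea_url}/api/v1/repos/{repo}/issues/{pr_number}/comments"
    headers = {"Authorization": f"token {token}"}
    response = requests.post(url, headers=headers, json={"body": "merge ok"})
    response.raise_for_status()
    return response.json()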
23
giwtf/tests/data/build_result.xml.template
Normal file
@@ -0,0 +1,23 @@
<resultlist state="0fef640bfb56c3e76fcfb698b19b59c0">
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="aarch64" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="ppc64le" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="x86_64" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="s390x" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
</resultlist>
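The per-architecture <status> codes in this template are what the staging bot aggregates into a single verdict. A hedged sketch of that reduction (not the bot's actual implementation, which lives in obs-staging-bot):

import xml.etree.ElementTree as ET

def overall_build_status(xml_text):
    # Every architecture's <status> must report "succeeded" for a pass
    root = ET.fromstring(xml_text)
    codes = {status.get("code") for status in root.findall(".//status")}
    return "successful" if codes == {"succeeded"} else "failed"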
18
giwtf/tests/data/source_openSUSE_Leap_16.0_PullRequest__meta
Normal file
@@ -0,0 +1,18 @@
<project name="openSUSE:Leap:16.0:PullRequest">
  <title>Leap 16.0 PullRequest area</title>
  <description>Base project to define the pull request builds</description>
  <person userid="autogits_obs_staging_bot" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard">
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <arch>x86_64</arch>
    <arch>i586</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
59
giwtf/tests/data/source_openSUSE_Leap_16.0__meta
Normal file
@@ -0,0 +1,59 @@
<project name="openSUSE:Leap:16.0">
  <title>openSUSE Leap 16.0 based on SLFO</title>
  <description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
  <link project="openSUSE:Backports:SLE-16.0"/>
  <scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
  <person userid="dimstar_suse" role="maintainer"/>
  <person userid="lkocman-factory" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <person userid="factory-auto" role="reviewer"/>
  <person userid="licensedigger" role="reviewer"/>
  <group groupid="autobuild-team" role="maintainer"/>
  <group groupid="factory-maintainers" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <group groupid="factory-staging" role="reviewer"/>
  <build>
    <disable repository="ports"/>
  </build>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard" rebuild="local">
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="product">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
    <path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
    <path project="openSUSE:Leap:16.0" repository="images"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="ports">
    <arch>armv7l</arch>
  </repository>
  <repository name="images">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
239
giwtf/tests/test_pr_workflow.py
Executable file
@@ -0,0 +1,239 @@
import os
import pytest
import requests
import re
import time
import random
import string
import subprocess
import xml.etree.ElementTree as ET
from pathlib import Path

# =============================================================================
# Constants
# =============================================================================

GITEA_URL = os.environ.get("GITEA_URL", "http://localhost:3000")
TOKEN_FILE = Path(__file__).parent.parent / "gitea-data" / "admin.token"
if TOKEN_FILE.exists():
    with open(TOKEN_FILE) as f:
        GITEA_TOKEN = f.read().strip()
else:
    GITEA_TOKEN = os.environ.get("GITEA_TOKEN")

TEST_DATA_DIR = Path(__file__).parent / "data"
BUILD_RESULT_TEMPLATE = TEST_DATA_DIR / "build_result.xml.template"
MOCK_RESPONSES_DIR = Path(__file__).parent.parent / "mock-obs" / "responses"
MOCK_BUILD_RESULT_FILE = MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0:PullRequest:*__result"
MOCK_BUILD_RESULT_FILE1 = MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0__result"


@pytest.fixture
def mock_build_result():
    """
    Fixture to create a mock build result file from the template.
    Returns a factory function that the test can call with parameters.
    """
    def _create_result_file(package_name: str, code: str):
        tree = ET.parse(BUILD_RESULT_TEMPLATE)
        root = tree.getroot()
        for status_tag in root.findall(".//status"):
            status_tag.set("package", package_name)
            status_tag.set("code", code)

        MOCK_RESPONSES_DIR.mkdir(exist_ok=True)
        tree.write(MOCK_BUILD_RESULT_FILE)
        tree.write(MOCK_BUILD_RESULT_FILE1)
        return str(MOCK_BUILD_RESULT_FILE)

    yield _create_result_file

    # Guard each unlink so teardown cannot fail when a file was never written
    if MOCK_BUILD_RESULT_FILE.exists():
        MOCK_BUILD_RESULT_FILE.unlink()
    if MOCK_BUILD_RESULT_FILE1.exists():
        MOCK_BUILD_RESULT_FILE1.unlink()


# =============================================================================
# HELPER FUNCTIONS
# =============================================================================

def create_gitea_pr(repo: str, diff_content: str, message: str):
    """Creates a Gitea pull request using the two-step diffpatch and pulls API."""
    if not GITEA_TOKEN:
        pytest.fail("GITEA_TOKEN not set or token file not found.")

    unique_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=8))
    new_branch_name = f"pr_test_{unique_id}"

    headers = {
        "Authorization": f"token {GITEA_TOKEN}",
        "Content-Type": "application/json",
    }

    diffpatch_url = f"{GITEA_URL}/api/v1/repos/{repo}/diffpatch"
    diffpatch_payload = {
        "branch": "main",
        "new_branch": new_branch_name,
        "content": diff_content,
        "message": message,
    }
    patch_response = requests.post(diffpatch_url, headers=headers, json=diffpatch_payload)
    patch_response.raise_for_status()

    pulls_url = f"{GITEA_URL}/api/v1/repos/{repo}/pulls"
    pulls_payload = {
        "base": "main",
        "head": new_branch_name,
        "title": message,
        "body": message,
    }
    pr_response = requests.post(pulls_url, headers=headers, json=pulls_payload)
    pr_response.raise_for_status()
    return pr_response.json()


def _get_timeline_events(repo, pr_number):
    """Helper to fetch timeline events for a given PR."""
    url = f"{GITEA_URL}/api/v1/repos/{repo}/issues/{pr_number}/timeline"
    headers = {"Authorization": f"token {GITEA_TOKEN}"}
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.json()


def _get_comments(repo, pr_number):
    """Helper to fetch comments for a given PR."""
    url = f"{GITEA_URL}/api/v1/repos/{repo}/issues/{pr_number}/comments"
    headers = {"Authorization": f"token {GITEA_TOKEN}"}
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.json()


def _get_pr_details(repo, pr_number):
    """Helper to fetch PR details."""
    url = f"{GITEA_URL}/api/v1/repos/{repo}/pulls/{pr_number}"
    headers = {"Authorization": f"token {GITEA_TOKEN}"}
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.json()


# =============================================================================
# TEST CASES
# =============================================================================

@pytest.mark.skipif(not all([GITEA_URL, GITEA_TOKEN]), reason="GITEA_URL and GITEA_TOKEN must be set")
def test_pr_workflow_succeeded(mock_build_result):
    """End-to-end test for a successful PR workflow."""
    diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed")
    initial_pr_number = pr["number"]

    compose_dir = Path(__file__).parent.parent

    forwarded_pr_number = None
    print(f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event...")
    for _ in range(20):
        time.sleep(1)
        timeline_events = _get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r'products/SLFO/pulls/(\d+)', url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert forwarded_pr_number is not None, "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = _get_pr_details("products/SLFO", forwarded_pr_number)
        # Gitea returns null (None) when no reviewers are requested, so fall back to []
        reviewers = pr_details.get('requested_reviewers') or []
        if any(r.get('login') == 'autogits_obs_staging_bot' for r in reviewers):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    mock_build_result(package_name="pkgA", code="succeeded")

    print("Restarting obs-staging-bot...")
    subprocess.run(["podman-compose", "restart", "obs-staging-bot"], cwd=compose_dir, check=True, capture_output=True)

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = _get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            print(event.get("body", "not a body"))
            if event.get("body") and "successful" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'successful' comment."


@pytest.mark.skipif(not all([GITEA_URL, GITEA_TOKEN]), reason="GITEA_URL and GITEA_TOKEN must be set")
def test_pr_workflow_failed(mock_build_result):
    """End-to-end test for a failed PR workflow."""
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = create_gitea_pr("pool/pkgA", diff, "Test PR - should fail")
    initial_pr_number = pr["number"]

    compose_dir = Path(__file__).parent.parent

    forwarded_pr_number = None
    print(f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event...")
    for _ in range(20):
        time.sleep(1)
        timeline_events = _get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r'products/SLFO/pulls/(\d+)', url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert forwarded_pr_number is not None, "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = _get_pr_details("products/SLFO", forwarded_pr_number)
        # Gitea returns null (None) when no reviewers are requested, so fall back to []
        reviewers = pr_details.get('requested_reviewers') or []
        if any(r.get('login') == 'autogits_obs_staging_bot' for r in reviewers):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    mock_build_result(package_name="pkgA", code="failed")

    print("Restarting obs-staging-bot...")
    subprocess.run(["podman-compose", "restart", "obs-staging-bot"], cwd=compose_dir, check=True, capture_output=True)

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = _get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            if event.get("body") and "failed" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'failed' comment."
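Both tests above repeat the same poll/sleep/assert pattern three times each. A shared helper along these lines could factor that out (a sketch, not part of the suite):

import time

def poll_until(predicate, attempts=20, delay=1.0):
    # Re-run predicate until it returns a truthy value or attempts run out;
    # the caller asserts on the returned value (None means timeout)
    for _ in range(attempts):
        result = predicate()
        if result:
            return result
        time.sleep(delay)
    return None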
1
giwtf/workflow-pr/Dockerfile
Symbolic link
@@ -0,0 +1 @@
Dockerfile.package
17
giwtf/workflow-pr/Dockerfile.local
Normal file
@@ -0,0 +1,17 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

# Install git and ssh
RUN zypper -n in git-core openssh-clients binutils

# Copy the pre-built binary into the container
COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr
COPY workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod 0755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
22
giwtf/workflow-pr/Dockerfile.package
Normal file
@@ -0,0 +1,22 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

# Install git, ssh, and the packaged bot
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils

# The binary comes from the package in this mode, not from a local build
# COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr
COPY workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod 0755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
66
giwtf/workflow-pr/entrypoint.sh
Normal file
@@ -0,0 +1,66 @@
#!/bin/bash
TOKEN_FILE="/var/lib/gitea/workflow-pr.token"

# Wait for the token file to be created by the gitea setup script
echo "Waiting for $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
    sleep 2
done

# Read token and trim whitespace/newlines
GITEA_TOKEN=$(tr -d '\n\r ' < "$TOKEN_FILE")

if [ -z "$GITEA_TOKEN" ]; then
    echo "Error: Token file $TOKEN_FILE is empty after trimming."
    exit 1
fi

export GITEA_TOKEN
echo "GITEA_TOKEN exported (length: ${#GITEA_TOKEN})"

# Wait for the dummy data to be created by the gitea setup script
echo "Waiting for workflow.config in products/SLFO..."
API_URL="http://gitea-test:3000/api/v1/repos/products/SLFO/contents/workflow.config"
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")

while [ "$HTTP_STATUS" != "200" ]; do
    echo "workflow.config not found yet (HTTP Status: $HTTP_STATUS). Retrying in 5s..."
    sleep 5
    HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
done

# Wait for the shared SSH key to be generated by the gitea setup script
echo "Waiting for /var/lib/gitea/ssh-keys/id_ed25519..."
while [ ! -f /var/lib/gitea/ssh-keys/id_ed25519 ]; do
    sleep 2
done

export AUTOGITS_IDENTITY_FILE="/root/.ssh/id_ed25519"

# Pre-populate known_hosts with Gitea's SSH host key
echo "Preparing SSH environment in /root/.ssh..."
mkdir -p /root/.ssh
chmod 700 /root/.ssh

# Copy the private key to the standard location and set permissions
cp /var/lib/gitea/ssh-keys/id_ed25519 /root/.ssh/id_ed25519
chmod 600 /root/.ssh/id_ed25519

echo "Scanning Gitea SSH host key..."
# We try multiple times because Gitea might still be starting its SSH server
for i in {1..10}; do
    ssh-keyscan -p 3022 gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
    echo "Retrying ssh-keyscan in 2s..."
    sleep 2
done
chmod 644 /root/.ssh/known_hosts

exe=$(which workflow-pr)
exe=${exe:-/usr/local/bin/workflow-pr}

package=$(rpm -qa | grep autogits-workflow-pr) || :

echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :

exec "$exe" "$@"
3
giwtf/workflow-pr/workflow-pr.json
Normal file
@@ -0,0 +1,3 @@
[
  "products/SLFO#main"
]
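Each entry appears to follow an org/repo#branch convention (compare the scmsync URL ending in #main in the project meta above). A tiny parsing sketch under that assumption; the default-branch fallback is itself an assumption:

def parse_repo_entry(entry):
    # "products/SLFO#main" -> ("products/SLFO", "main")
    repo, _, branch = entry.partition("#")
    return repo, branch or "main"  # assumed default when no branch is given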