SHA256
1
0

Compare commits

..

15 Commits

Author SHA256 Message Date
Frank Schreiner
7436607a57 t: create logfile when running make pytest 2026-03-11 18:14:17 +01:00
Frank Schreiner
2b33bbca6f t: rename container obs-staging-bot to gwf-obs-staging-bot 2026-03-11 15:44:57 +01:00
Frank Schreiner
aa858f9e54 t: rename container workflow-pr to gwf-workflow-pr 2026-03-11 15:17:13 +01:00
Frank Schreiner
185e8cde6d t: rename container gitea-publisher to gwf-gitea-publisher 2026-03-11 15:07:39 +01:00
Frank Schreiner
445cb89486 t: rename container rabbitmq-test to gwf-rabbitmq-test 2026-03-11 15:07:30 +01:00
Frank Schreiner
1c5c8cbcf1 t: add distclean target to Makefile 2026-03-11 15:06:47 +01:00
Frank Schreiner
86f1ae18ce t: rename gitea-test container to 'gwf-gitea-test' 2026-03-11 14:32:14 +01:00
Frank Schreiner
c3b70b5999 t: rename tester container to gwf-testclient 2026-03-11 14:11:00 +01:00
Frank Schreiner
b2d737d636 t: moved commands from clean.sh into Makefile
sudo should no longer be needed
2026-03-11 13:24:00 +01:00
Frank Schreiner
773aa3bd30 t: add logging to podman-compose.yml 2026-03-11 13:19:50 +01:00
Frank Schreiner
3e28250a6a t: run containers with userns_mode: "keep-id"
* Affected containers:
  * gitea-test
  * rabbitmq-test
* files on the host now get created with the permissions of the host user
* setup scripts have been modified
  * removed chown commands
  * removed su-wrappers
2026-03-11 12:44:17 +01:00
Frank Schreiner
34ec06040b add vim cache/swap files to gitignore 2026-03-11 11:32:59 +01:00
Frank Schreiner
4aadb9e4bc t: add Dockerfiles for building in OBS 2026-03-11 11:32:58 +01:00
b04755c667 Merge branch 'main' into refactoring-make 2026-03-10 12:30:20 +01:00
1fc0be5f60 make: refactor build target into per-module dependencies
- Replace shell loop in build with explicit module targets
- Add .PHONY for build and module targets
- Keep go build -C <module> -buildmode=pie behavior unchanged
- Enable parallel builds via make -jN build
2026-03-06 11:33:32 +01:00
23 changed files with 164 additions and 571 deletions

2
.gitignore vendored
View File

@@ -1,5 +1,7 @@
*.osc
*.conf
.*.sw?
/integration/logs/*.log
!/integration/**/*.conf
/integration/gitea-data
/integration/gitea-logs

View File

@@ -1,4 +1,8 @@
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-groups-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
build:
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done
.PHONY: build $(MODULES)
build: $(MODULES)
$(MODULES):
go build -C $@ -buildmode=pie

View File

@@ -5,7 +5,7 @@
# Typical workflow:
# 1. 'make build' - prepares images
# 2. 'make up' - spawns podman-compose
# 3. 'make pytest' - run tests inside the tester container
# 3. 'make pytest' - run tests inside the gwf-testclient container
# 4. 'make down' - once the containers are not needed
#
# OR just run 'make test' to do it all at once.
@@ -33,36 +33,36 @@ wait_healthy:
@echo "Waiting for services to be healthy..."
@echo "Waiting for gitea (max 2m)..."
@start_time=$$(date +%s); \
until podman exec gitea-test curl -f -s http://localhost:3000/api/v1/version >/dev/null 2>&1; do \
until podman exec gwf-gitea-test curl -f -s http://localhost:3000/api/v1/version >/dev/null 2>&1; do \
current_time=$$(date +%s); \
elapsed=$$((current_time - start_time)); \
if [ $$elapsed -gt 120 ]; then \
echo "ERROR: Gitea failed to start within 2 minutes."; \
echo "--- Troubleshooting Info ---"; \
echo "Diagnostics output (curl):"; \
podman exec gitea-test curl -v http://localhost:3000/api/v1/version || true; \
podman exec gwf-gitea-test curl -v http://localhost:3000/api/v1/version || true; \
echo "--- Container Logs ---"; \
podman logs gitea-test --tail 20; \
podman logs gwf-gitea-test --tail 20; \
echo "--- Container Status ---"; \
podman inspect gitea-test --format '{{.State.Status}}'; \
podman inspect gwf-gitea-test --format '{{.State.Status}}'; \
exit 1; \
fi; \
sleep 2; \
done
@echo "Waiting for rabbitmq (max 2m)..."
@start_time=$$(date +%s); \
until podman exec rabbitmq-test rabbitmq-diagnostics check_running -q >/dev/null 2>&1; do \
until podman exec gwf-rabbitmq-test rabbitmq-diagnostics check_running -q >/dev/null 2>&1; do \
current_time=$$(date +%s); \
elapsed=$$((current_time - start_time)); \
if [ $$elapsed -gt 120 ]; then \
echo "ERROR: RabbitMQ failed to start within 2 minutes."; \
echo "--- Troubleshooting Info ---"; \
echo "Diagnostics output:"; \
podman exec rabbitmq-test rabbitmq-diagnostics check_running || true; \
podman exec gwf-rabbitmq-test rabbitmq-diagnostics check_running || true; \
echo "--- Container Logs ---"; \
podman logs rabbitmq-test --tail 20; \
podman logs gwf-rabbitmq-test --tail 20; \
echo "--- Container Status ---"; \
podman inspect rabbitmq-test --format '{{.State.Status}}'; \
podman inspect gwf-rabbitmq-test --format '{{.State.Status}}'; \
exit 1; \
fi; \
sleep 2; \
@@ -70,7 +70,7 @@ wait_healthy:
@echo "All services are healthy!"
pytest:
podman-compose exec tester pytest -v tests
podman-compose exec gwf-testclient pytest -v -s tests 2>&1|tee logs/make_pytest.log
build:
podman pull docker.io/library/rabbitmq:3.13.7-management
@@ -91,3 +91,12 @@ up-bots-package:
# mode B
up-bots-local:
GIWTF_IMAGE_SUFFIX=.local podman-compose up -d
clean: down
rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/ logs/*.log
distclean: clean
CONTAINERS=`podman images|grep -P 'localhost.*(integration|gwf-).*'|awk '{print $$1}'`;\
[ -n "$$CONTAINERS" ] && \
podman rmi -f $$CONTAINERS || \
true

View File

@@ -10,7 +10,7 @@ This document describes the targets available in the `integration/Makefile`.
1. `build`: Prepares all container images.
2. `up`: Starts all services via `podman-compose`.
3. `wait_healthy`: Polls Gitea and RabbitMQ until they are ready.
4. `pytest`: Executes the test suite inside the `tester` container.
4. `pytest`: Executes the test suite inside the `gwf-testclient` container.
- **Outcome**: The environment remains active for fast iteration.
### `test-ci`
@@ -30,11 +30,11 @@ This document describes the targets available in the `integration/Makefile`.
- **Action**: Starts the container topology in detached mode.
### `wait_healthy`
- **Action**: Polls the health status of `gitea-test` and `rabbitmq-test` containers.
- **Action**: Polls the health status of `gwf-gitea-test` and `gwf-rabbitmq-test` containers.
- **Purpose**: Ensures infrastructure is stable before test execution.
### `pytest`
- **Action**: Runs `pytest -v tests/*` inside the running `tester` container.
- **Action**: Runs `pytest -v tests/*` inside the running `gwf-testclient` container.
- **Requirement**: The environment must already be started via `up`.
### `down`

View File

@@ -1 +0,0 @@
sudo rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/

View File

@@ -7,9 +7,9 @@ STATIC_ROOT_PATH = /usr/share/gitea
APP_DATA_PATH = /var/lib/gitea/data
PPROF_DATA_PATH = /var/lib/gitea/data/tmp/pprof
PROTOCOL = http
DOMAIN = gitea-test
SSH_DOMAIN = gitea-test
ROOT_URL = http://gitea-test:3000/
DOMAIN = gwf-gitea-test
SSH_DOMAIN = gwf-gitea-test
ROOT_URL = http://gwf-gitea-test:3000/
HTTP_PORT = 3000
DISABLE_SSH = false
START_SSH_SERVER = true
@@ -39,4 +39,4 @@ LEVEL = Debug
ENABLE_BASIC_AUTHENTICATION = true
[webhook]
ALLOWED_HOST_LIST = gitea-publisher
ALLOWED_HOST_LIST = gwf-gitea-publisher

View File

@@ -16,4 +16,4 @@ echo "Starting Gitea..."
# We will switch to that user and run the web command.
# Using exec means Gitea will become PID 1, allowing it to receive signals correctly.
cd /var/lib/gitea
exec su -s /bin/bash gitea -c "/usr/bin/gitea web --config /etc/gitea/conf/app.ini"
exec /usr/bin/gitea web --config /etc/gitea/conf/app.ini

View File

@@ -4,18 +4,16 @@ set -e
# Set ownership on the volume mounts. This allows the 'gitea' user to write to them.
# We use -R to ensure all subdirectories (like /var/lib/gitea/data) are covered.
chown -R gitea:gitea /var/lib/gitea /var/log/gitea
# Set ownership on the config directory.
chown -R gitea:gitea /etc/gitea
# Run database migrations to initialize the sqlite3 db based on app.ini.
su -s /bin/bash gitea -c 'gitea migrate'
gitea migrate
# Create a default admin user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
if ! gitea admin user list | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
echo "Creating admin user..."
su -s /bin/bash gitea -c 'gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin'
gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin
else
echo "Admin user already exists."
fi
@@ -26,11 +24,10 @@ if [ -f "$ADMIN_TOKEN_FILE" ]; then
echo "Admin token already exists at $ADMIN_TOKEN_FILE."
else
echo "Generating admin token..."
ADMIN_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t admin-token")
ADMIN_TOKEN=$(gitea admin user generate-access-token -raw -u admin -t admin-token)
if [ -n "$ADMIN_TOKEN" ]; then
printf "%s" "$ADMIN_TOKEN" > "$ADMIN_TOKEN_FILE"
chmod 777 "$ADMIN_TOKEN_FILE"
chown gitea:gitea "$ADMIN_TOKEN_FILE"
echo "Admin token generated and saved to $ADMIN_TOKEN_FILE."
else
echo "Failed to generate admin token."
@@ -43,16 +40,15 @@ mkdir -p "$SSH_KEY_DIR"
if [ ! -f "$SSH_KEY_DIR/id_ed25519" ]; then
echo "Generating SSH key for admin user..."
ssh-keygen -t ed25519 -N "" -f "$SSH_KEY_DIR/id_ed25519"
chown -R gitea:gitea "$SSH_KEY_DIR"
chmod 700 "$SSH_KEY_DIR"
chmod 600 "$SSH_KEY_DIR/id_ed25519"
chmod 644 "$SSH_KEY_DIR/id_ed25519.pub"
fi
# Create a autogits_obs_staging_bot user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
if ! gitea admin user list | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
echo "Creating autogits_obs_staging_bot user..."
su -s /bin/bash gitea -c 'gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false'
gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false
else
echo "autogits_obs_staging_bot user already exists."
fi
@@ -63,11 +59,10 @@ if [ -f "$BOT_TOKEN_FILE" ]; then
echo "autogits_obs_staging_bot token already exists at $BOT_TOKEN_FILE."
else
echo "Generating autogits_obs_staging_bot token..."
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token")
BOT_TOKEN=$(gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token)
if [ -n "$BOT_TOKEN" ]; then
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
chmod 666 "$BOT_TOKEN_FILE"
chown gitea:gitea "$BOT_TOKEN_FILE"
echo "autogits_obs_staging_bot token generated and saved to $BOT_TOKEN_FILE."
else
echo "Failed to generate autogits_obs_staging_bot token."
@@ -75,9 +70,9 @@ else
fi
# Create a workflow-pr user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
if ! gitea admin user list | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
echo "Creating workflow-pr user..."
su -s /bin/bash gitea -c 'gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false'
gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false
else
echo "workflow-pr user already exists."
fi
@@ -88,11 +83,10 @@ if [ -f "$BOT_TOKEN_FILE" ]; then
echo "workflow-pr token already exists at $BOT_TOKEN_FILE."
else
echo "Generating workflow-pr token..."
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token")
BOT_TOKEN=$(gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token)
if [ -n "$BOT_TOKEN" ]; then
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
chmod 666 "$BOT_TOKEN_FILE"
chown gitea:gitea "$BOT_TOKEN_FILE"
echo "workflow-pr token generated and saved to $BOT_TOKEN_FILE."
else
echo "Failed to generate workflow-pr token."

View File

@@ -2,7 +2,7 @@
set -e
GITEA_URL="http://localhost:3000"
WEBHOOK_URL="http://gitea-publisher:8002/rabbitmq-forwarder"
WEBHOOK_URL="http://gwf-gitea-publisher:8002/rabbitmq-forwarder"
TOKEN_NAME="webhook-creator"
echo "Webhook setup script started in background."

0
integration/logs/.keep Normal file
View File

View File

@@ -0,0 +1,7 @@
#!BuildTag: openbuildservice/gwf-test-basecontainer
#!UseOBSRepositories
FROM registry.suse.com/bci/bci-base:15.7
RUN zypper -n install binutils gawk git git-core git-lfs jq make openssh openssh-clients sqlite3 vim which
RUN zypper -n install autogits-gitea-events-rabbitmq-publisher autogits-obs-staging-bot autogits-workflow-pr gitea

View File

@@ -0,0 +1,5 @@
#!BuildTag: openbuildservice/gwf-client
#!UseOBSRepositories
FROM opensuse/tumbleweed
RUN zypper -n install podman podman-compose vim make python3-pytest python3-requests python3-pytest-dependency python3-pytest-httpserver

View File

@@ -9,7 +9,7 @@ This document describes the services defined in `podman-compose.yml` used for in
### gitea
- **Description**: Self-hosted Git service, serving as the central hub for repositories.
- **Container Name**: `gitea-test`
- **Container Name**: `gwf-gitea-test`
- **Image**: Built from `./gitea/Dockerfile`
- **Ports**: `3000` (HTTP), `3022` (SSH)
- **Volumes**: `./gitea-data` (persistent data), `./gitea-logs` (logs)
@@ -17,39 +17,39 @@ This document describes the services defined in `podman-compose.yml` used for in
### rabbitmq
- **Description**: Message broker for asynchronous communication between services.
- **Container Name**: `rabbitmq-test`
- **Container Name**: `gwf-rabbitmq-test`
- **Image**: `rabbitmq:3.13.7-management`
- **Ports**: `5671` (AMQP with TLS), `15672` (Management UI)
- **Volumes**: `./rabbitmq-data`, `./rabbitmq-config/certs`, `./rabbitmq-config/rabbitmq.conf`, `./rabbitmq-config/definitions.json`
- **Healthcheck**: Ensures the broker is running and ready to accept connections.
### gitea-publisher
### gwf-gitea-publisher
- **Description**: Publishes events from Gitea webhooks to the RabbitMQ message queue.
- **Container Name**: `gitea-publisher`
- **Container Name**: `gwf-gitea-publisher`
- **Dependencies**: `gitea` (started), `rabbitmq` (healthy)
- **Topic Domain**: `suse`
### workflow-pr
### gwf-workflow-pr
- **Description**: Manages pull request workflows, synchronizing between ProjectGit and PackageGit.
- **Container Name**: `workflow-pr`
- **Container Name**: `gwf-workflow-pr`
- **Dependencies**: `gitea` (started), `rabbitmq` (healthy)
- **Environment**: Configured via `AUTOGITS_*` variables.
- **Volumes**: `./gitea-data` (read-only), `./workflow-pr/workflow-pr.json` (config), `./workflow-pr-repos` (working directories)
### tester
### gwf-testclient
- **Description**: The dedicated test runner container. It hosts the `pytest` suite and provides a mock OBS API using `pytest-httpserver`.
- **Container Name**: `tester`
- **Image**: Built from `./Dockerfile.tester`
- **Container Name**: `gwf-testclient`
- **Image**: Built from `./Dockerfile.gwf-testclient`
- **Mock API**: Listens on port `8080` within the container network to simulate OBS.
- **Volumes**: Project root mounted at `/opt/project` for source access.
### obs-staging-bot
- **Description**: Interacts with Gitea and the OBS API (mocked by `tester`) to manage staging projects.
- **Container Name**: `obs-staging-bot`
- **Dependencies**: `gitea` (started), `tester` (started)
### gwf-obs-staging-bot
- **Description**: Interacts with Gitea and the OBS API (mocked by `gwf-testclient`) to manage staging projects.
- **Container Name**: `gwf-obs-staging-bot`
- **Dependencies**: `gitea` (started), `gwf-testclient` (started)
- **Environment**:
- `AUTOGITS_STAGING_BOT_POLL_INTERVAL`: Set to `2s` for fast integration testing.
- **Mock Integration**: Points to `http://tester:8080` for both OBS API and Web hosts.
- **Mock Integration**: Points to `http://gwf-testclient:8080` for both OBS API and Web hosts.
---
@@ -58,7 +58,7 @@ This document describes the services defined in `podman-compose.yml` used for in
1. **Build**: `make build` (root) then `make build` (integration).
2. **Up**: `make up` starts all services.
3. **Wait**: `make wait_healthy` ensures infrastructure is ready.
4. **Test**: `make pytest` runs the suite inside the `tester` container.
4. **Test**: `make pytest` runs the suite inside the `gwf-testclient` container.
5. **Down**: `make down` stops and removes containers.
Use `make test` to perform steps 1-4 automatically.

View File

@@ -5,9 +5,9 @@ networks:
driver: bridge
services:
gitea:
gwf-gitea-test:
build: ./gitea
container_name: gitea-test
container_name: gwf-gitea-test
init: true
environment:
- GITEA_WORK_DIR=/var/lib/gitea
@@ -24,10 +24,16 @@ services:
# Persist Gitea's logs to a local directory
- ./gitea-logs:/var/log/gitea:Z
restart: unless-stopped
userns_mode: "keep-id:uid=497,gid=483"
user: "497:497"
logging:
driver: "k8s-file"
options:
path: "logs/gwf-gitea-test.log"
rabbitmq:
gwf-rabbitmq:
image: rabbitmq:3.13.7-management
container_name: rabbitmq-test
container_name: gwf-rabbitmq-test
init: true
healthcheck:
test: ["CMD", "rabbitmq-diagnostics", "check_running", "-q"]
@@ -51,33 +57,42 @@ services:
# Mount exchange definitions
- ./rabbitmq-config/definitions.json:/etc/rabbitmq/definitions.json:Z
restart: unless-stopped
userns_mode: "keep-id"
logging:
driver: "k8s-file"
options:
path: "logs/gwf-rabbitmq-test.log"
gitea-publisher:
gwf-gitea-publisher:
build:
context: ..
dockerfile: integration/gitea-events-rabbitmq-publisher/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: gitea-publisher
container_name: gwf-gitea-publisher
init: true
networks:
- gitea-network
depends_on:
gitea:
gwf-gitea-test:
condition: service_started
rabbitmq:
gwf-rabbitmq:
condition: service_healthy
environment:
- RABBITMQ_HOST=rabbitmq-test
- RABBITMQ_HOST=gwf-rabbitmq-test
- RABBITMQ_USERNAME=gitea
- RABBITMQ_PASSWORD=gitea
- SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
command: [ "-listen", "0.0.0.0:8002", "-topic-domain", "suse", "-debug" ]
restart: unless-stopped
logging:
driver: "k8s-file"
options:
path: "logs/gwf-gitea-publisher.log"
workflow-pr:
gwf-workflow-pr:
build:
context: ..
dockerfile: integration/workflow-pr/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: workflow-pr
container_name: gwf-workflow-pr
init: true
networks:
- gitea-network
@@ -87,9 +102,9 @@ services:
timeout: 5s
retries: 5
depends_on:
gitea:
gwf-gitea-test:
condition: service_started
rabbitmq:
gwf-rabbitmq:
condition: service_healthy
environment:
- AMQP_USERNAME=gitea
@@ -99,21 +114,25 @@ services:
- ./gitea-data:/var/lib/gitea:ro,z
- ./workflow-pr/workflow-pr.json:/etc/workflow-pr.json:ro,z
- ./workflow-pr-repos:/var/lib/workflow-pr/repos:Z
command: [
command: [
"-check-on-start",
"-debug",
"-gitea-url", "http://gitea-test:3000",
"-url", "amqps://rabbitmq-test:5671",
"-debug",
"-gitea-url", "http://gwf-gitea-test:3000",
"-url", "amqps://gwf-rabbitmq-test:5671",
"-config", "/etc/workflow-pr.json",
"-repo-path", "/var/lib/workflow-pr/repos"
]
restart: unless-stopped
logging:
driver: "k8s-file"
options:
path: "logs/gwf-workflow-pr.log"
tester:
gwf-testclient:
build:
context: .
dockerfile: Dockerfile.tester
container_name: tester
dockerfile: Dockerfile.gwf-testclient
container_name: gwf-testclient
init: true
dns_search: .
networks:
@@ -124,27 +143,35 @@ services:
volumes:
- ..:/opt/project:z
command: sleep infinity
logging:
driver: "k8s-file"
options:
path: "logs/gwf-testclient.log"
obs-staging-bot:
gwf-obs-staging-bot:
build:
context: ..
dockerfile: integration/obs-staging-bot/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: obs-staging-bot
container_name: gwf-obs-staging-bot
init: true
networks:
- gitea-network
depends_on:
workflow-pr:
gwf-workflow-pr:
condition: service_started
environment:
- OBS_USER=mock
- OBS_PASSWORD=mock-long-password
- AUTOGITS_STAGING_BOT_POLL_INTERVAL=2s
- AUTOGITS_STAGING_BOT_POLL_INTERVAL=6s
volumes:
- ./gitea-data:/gitea-data:ro,z
command:
- "-debug"
- "-gitea-url=http://gitea-test:3000"
- "-obs=http://tester:8080"
- "-obs-web=http://tester:8080"
- "-gitea-url=http://gwf-gitea-test:3000"
- "-obs=http://gwf-testclient:8080"
- "-obs-web=http://gwf-testclient:8080"
restart: unless-stopped
logging:
driver: "k8s-file"
options:
path: "logs/gwf-obs-staging-bot.log"

View File

@@ -1,30 +1,30 @@
-----BEGIN CERTIFICATE-----
MIIFKzCCAxOgAwIBAgIUJsg/r0ZyIVxtAkrlZKOr4LvYEvMwDQYJKoZIhvcNAQEL
BQAwGDEWMBQGA1UEAwwNcmFiYml0bXEtdGVzdDAeFw0yNjAxMjQxMjQyMjNaFw0z
NjAxMjIxMjQyMjNaMBgxFjAUBgNVBAMMDXJhYmJpdG1xLXRlc3QwggIiMA0GCSqG
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC9OjTq4DgqVo0mRpS8DGRR6SFrSpb2bqnl
YI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4WyRQH5PwnXOA1Lxz51qp1
t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sbKY1+/NVJpMs6dL6NzJXG
N6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYUmm43tjjxPZ81rhCXFx/F
v1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKXOPDxrM1erBN5eCwTpJMS
4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVAsqhiUa2eeAVkFwZl9wRw
lddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hAY/V+VbF1iHGAVIz4XlUj
gC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP+1hZJbm7ePsS5Na+qYG1
LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDPXCxFXDrIcFvldmJYTyeK
7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9bBjNnWaY2z17hijnzVVz
jOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrkNt7DOCUqwBnuBqYiRZFt
A1QicHxaEQIDAQABo20wazAdBgNVHQ4EFgQU3l25Ghab2k7UhwxftZ2vZ1HO9Sow
HwYDVR0jBBgwFoAU3l25Ghab2k7UhwxftZ2vZ1HO9SowDwYDVR0TAQH/BAUwAwEB
/zAYBgNVHREEETAPgg1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3DQEBCwUAA4ICAQB9
ilcsRqIvnyN25Oh668YC/xxyeNTIaIxjMLyJaMylBRjNwo1WfbdpXToaEXgot5gK
5HGlu3OIBBwBryNAlBtf/usxzLzmkEsm1Dsn9sJNY1ZTkD8MO9yyOtLqBlqAsIse
oPVjzSdjk1fP3uyoG/ZUVAFZHZD3/9BEsftfS13oUVxo7vYz1DSyUATT/4QTYMQB
PytL6EKJ0dLyuy7rIkZVkaUi+P7GuDXj25Mi6Zkxaw2QnssSuoqy1bAMkzEyNFK5
0wlNWEY8H3jRZuAz1T4AXb9sjeCgBKZoWXgmGbzleOophdzvlq66UGAWPWYFGp8Q
4GJognovhKzSY9+3n+rMPLAXSao48SYDlyTOZeBo1DTluR5QjVd+NWbEdIsA6buQ
a6uPTSVKsulm7hyUlEZp+SsYAtVoZx3jzKKjZXjnaxOfUFWx6pTxNXvxR7pQ/8Ls
IfduGy4VjKVQdyuwCE7eVEPDK6d53WWs6itziuj7gfq8mHvZivIA65z05lTwqkvb
1WS2aht+zacqVSYyNrK+/kJA2CST3ggc1EO73lRvbfO9LJZWMdO+f/tkXH4zkfmL
A3JtJcLOWuv+ZrZvHMpKlBFNMySxE3IeGX+Ad9bGyhZvZULut95/QD7Xy4cPRZHF
R3SRn0rn/BeTly+5fkEoFk+ttah8IbwzhduPyPIxng==
MIIFNzCCAx+gAwIBAgIUcZ3+bIoJ1Iw/BjKYPzovWJGxVm8wDQYJKoZIhvcNAQEL
BQAwHDEaMBgGA1UEAwwRZ3dmLXJhYmJpdG1xLXRlc3QwHhcNMjYwMzExMTQwNzA5
WhcNMzYwMzA4MTQwNzA5WjAcMRowGAYDVQQDDBFnd2YtcmFiYml0bXEtdGVzdDCC
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL06NOrgOCpWjSZGlLwMZFHp
IWtKlvZuqeVgjvFIjfLruL+g/jB6JlJrCTb567GGw3gV86WwCC+mDhbJFAfk/Cdc
4DUvHPnWqnW3RW6YTcHVEOF6IFMTyWgbUW+aKQ6J6TaDPPvqcrRHWxspjX781Umk
yzp0vo3MlcY3poKlZOTuh5j6Vbls8EbRVEDtFQbzTD1H1EO2phzRJhSabje2OPE9
nzWuEJcXH8W/WTGc1NAl0BM2tOf1Kh/KZJ+bjZ/iWg4bIYGyoljJspc48PGszV6s
E3l4LBOkkxLgnfRoxJ1BPPNrZnjCLYtHS+r8VT+i6rOVGnv7u00ltUCyqGJRrZ54
BWQXBmX3BHCV11jRbzlTnTufDkzeJC/YZM6ElFfCFanMNQVDR9PnqEBj9X5VsXWI
cYBUjPheVSOALbXC5fPTFF2o/xwJ3xQctJunx2aJ7XrUZ4pXA7Tfto/7WFklubt4
+xLk1r6pgbUsLLQaF9CfvUGxhRoYm19FxozAhupo0hif5jpLUVWPEM9cLEVcOshw
W+V2YlhPJ4rsquQ7Y/VFsSLBjI89SG2FtvUCCL1m61mydp2Y9GTbu31sGM2dZpjb
PXuGKOfNVXOM4e4+WlvtxJpqBXMtNPTOtn6J+dlbB2XRLZmuMEB5WuQ23sM4JSrA
Ge4GpiJFkW0DVCJwfFoRAgMBAAGjcTBvMB0GA1UdDgQWBBTeXbkaFpvaTtSHDF+1
na9nUc71KjAfBgNVHSMEGDAWgBTeXbkaFpvaTtSHDF+1na9nUc71KjAPBgNVHRMB
Af8EBTADAQH/MBwGA1UdEQQVMBOCEWd3Zi1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3
DQEBCwUAA4ICAQBr3VTgEEDgtjXaKOCPjcr8hbPtxb5jZYLJLGI9/jr5i1CSL7q6
/Iob91FudW12gSmiSsOdi9bazDbc5JECbpUEGrktEzbcJsc0uMUCD0rt3vDqrMgT
JEuiM4SRYK+GFteRLY8k8jl/TA+PIjKR0s0IycHiwpf/hzsPE8SfVaK85uCiAPij
7vWahg/VXIQjk7c3oA9gqIXAmVtconUXO/Tll41LtyEgMjd3xn8qd+j+6v9WdQsg
fWIT03EY8cjaDVtY/PZctrfFIcVCIGEinaOkFUinkjw1KsUxaSh1CDqFw3IVFwTn
HF3wATHjF389QNgPo3MzPtHv3yPm9yvD1jEyjHiqdXH9rCOGlJ3icTIZlP05HpYq
WUMRsbkYen3VJr+8hME+tELMkSCi9aYBjUl1DwGqDpwb6cR4shvAP8CVD3h6vXX5
weJcSoxpw6LDd98geIgr6cgGgJqThoSy4xPrwmUruT2NRiMFjMiazDXA5iEDcsm9
PeDi33ujoXSIfMV558CvEsf5Wf1krub2+IYVwoVhkx9mQP4BHQq88M6sUKe64Bgc
civn45rXZqkA0n5lpeW/W9uxec/2h+6m6ac4s5SyY3zi1cLwbDqsKYsFzQpjHsFv
wxIvRumGTEpX3P9BpmH8dbRvO5DP1pZaR4aGW67J7bebSYJ2DbjSGD/BEg==
-----END CERTIFICATE-----

View File

@@ -38,7 +38,7 @@ def default_obs_handlers(httpserver, obs_mock_state):
"""
def project_meta_handler(request):
project = request.path.split("/")[2]
scmsync = obs_mock_state.project_metas.get(project, "http://gitea-test:3000/myproducts/mySLFO.git")
scmsync = obs_mock_state.project_metas.get(project, "http://gwf-gitea-test:3000/myproducts/mySLFO.git")
return f'<project name="{project}"><scmsync>{scmsync}</scmsync></project>'
def build_result_handler(request):
@@ -54,7 +54,7 @@ def default_obs_handlers(httpserver, obs_mock_state):
# or we can use the template. For simplicity, let's use a basic one.
xml_template = f"""<resultlist state="mock">
<result project="{project}" repository="standard" arch="x86_64" code="unpublished" state="unpublished">
<scmsync>http://gitea-test:3000/myproducts/mySLFO.git?onlybuild={package_name}#sha</scmsync>
<scmsync>http://gwf-gitea-test:3000/myproducts/mySLFO.git?onlybuild={package_name}#sha</scmsync>
<status package="{package_name}" code="{code}"/>
</result>
</resultlist>"""
@@ -283,7 +283,7 @@ def gitea_env():
"webhooks": set(),
}
gitea_url = "http://gitea-test:3000"
gitea_url = "http://gwf-gitea-test:3000"
admin_token_path = os.path.join(os.path.dirname(__file__), "..", "gitea-data", "admin.token")
admin_token = None
@@ -329,7 +329,7 @@ def gitea_env():
else: stats["repos"]["reused"] += 1
# Create webhook for publisher
new_hook = client.create_webhook(org, repo, "http://gitea-publisher:8002/rabbitmq-forwarder")
new_hook = client.create_webhook(org, repo, "http://gwf-gitea-publisher:8002/rabbitmq-forwarder")
if repo_full not in handled_in_session["webhooks"]:
handled_in_session["webhooks"].add(repo_full)
if new_hook: stats["webhooks"]["new"] += 1

View File

@@ -2,7 +2,7 @@
<title>openSUSE Leap 16.0 based on SLFO</title>
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
<link project="openSUSE:Backports:SLE-16.0"/>
<scmsync>http://gitea-test:3000/myproducts/mySLFO#staging-main</scmsync>
<scmsync>http://gwf-gitea-test:3000/myproducts/mySLFO#staging-main</scmsync>
<person userid="dimstar_suse" role="maintainer"/>
<person userid="lkocman-factory" role="maintainer"/>
<person userid="maxlin_factory" role="maintainer"/>

View File

@@ -238,7 +238,7 @@ index 00000000..{pkg_b_sha}
if hook["config"]["url"] == target_url:
vprint(f"Webhook for {owner}/{repo} already exists with correct URL.")
return False
elif "gitea-publisher" in hook["config"]["url"] or "10.89.0." in hook["config"]["url"]:
elif "gitea-publisher" in hook["config"]["url"] or "gwf-gitea-publisher" in hook["config"]["url"] or "10.89.0." in hook["config"]["url"]:
vprint(f"Found old webhook {hook['id']} with URL {hook['config']['url']}. Deleting...")
self._request("DELETE", f"{url}/{hook['id']}")
except requests.exceptions.HTTPError:

View File

@@ -20,7 +20,7 @@ echo "GITEA_TOKEN exported (length: ${#GITEA_TOKEN})"
# Wait for the dummy data to be created by the gitea setup script
echo "Waiting for workflow.config in myproducts/mySLFO (branch zz-ready-to-start)..."
API_URL="http://gitea-test:3000/api/v1/repos/myproducts/mySLFO/contents/workflow.config?ref=zz-ready-to-start"
API_URL="http://gwf-gitea-test:3000/api/v1/repos/myproducts/mySLFO/contents/workflow.config?ref=zz-ready-to-start"
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
WAITED=false
@@ -52,7 +52,7 @@ chmod 600 /root/.ssh/id_ed25519
echo "Scanning Gitea SSH host key..."
# We try multiple times because Gitea might still be starting its SSH server
for i in {1..10}; do
ssh-keyscan -p 3022 gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
ssh-keyscan -p 3022 gwf-gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
echo "Retrying ssh-keyscan in 2s..."
sleep 2
done

View File

@@ -1 +0,0 @@
obs-groups-bot

View File

@@ -1,242 +0,0 @@
// Connect to the Open Build Service (OBS) API, retrieves a list of all groups,
// and exports their metadata (specifically member lists) into individual JSON files.
//
// The tool supports both command-line flags and environment variables for configuration
// (not for authentication, which is only via env vars), and includes a debug mode for verbose output.
// It handles different XML response formats from the OBS API and ensures that
// the output JSON files are properly sanitized and formatted.
//
// The accepted command-line flags are:
//
// -debug: Enable debug output showing API URLs and responses.
// -instance: Name of the OBS instance (used in metadata, default "openSUSE").
// -host: Base URL of the OBS API (default "http://localhost:3000").
// -output: Directory to save the JSON files (default "groups").
//
// Usage:
//
// # Using environment variables (OBS_USER, OBS_PASSWORD)
// go run main.go
//
// # Targeting a specific OBS instance and output directory
// go run main.go -host "https://api.opensuse.org" -output "./obs_groups"
//
// # Full command with debug mode
// go run main.go -host http://localhost:8000 -output "./obs_groups" -instance "OBS" -debug
package main
import (
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io"
"log"
"os"
"path/filepath"
"strings"
"time"
"src.opensuse.org/autogits/common"
)
// groupsList models the primary XML response shape of "GET /group":
// <groups><group groupid="..."/>...</groups>.
type groupsList struct {
XMLName xml.Name `xml:"groups"`
Groups []groupItem `xml:"group"`
}
// groupsListAlt models the alternative response shape some OBS instances
// return for the same endpoint: <directory><entry name="..."/>...</directory>.
type groupsListAlt struct {
XMLName xml.Name `xml:"directory"`
Entries []groupEntry `xml:"entry"`
}
// groupEntry is one <entry> of a <directory> listing. The group name may be
// carried either in the "name" attribute or as the element's inner text,
// depending on the server; both are captured so getName can pick one.
type groupEntry struct {
Name string `xml:"name,attr,omitempty"`
Inner string `xml:",innerxml"`
}
// getName returns the group name of the entry, preferring the "name"
// attribute and falling back to the element's inner XML text.
func (e *groupEntry) getName() string {
if e.Name != "" {
return e.Name
}
return e.Inner
}
// groupItem is one <group> element of a <groups> listing; the group name is
// carried in the "groupid" attribute.
type groupItem struct {
GroupID string `xml:"groupid,attr"`
}
// getAllGroups queries the OBS API endpoint "GET /group" and returns the
// list of group IDs. The endpoint is known to answer in two different XML
// shapes — <groups><group groupid=.../> or <directory><entry name=.../> —
// so both are attempted in order. Returns an error on transport failure,
// on a non-200 status (with a truncated body for diagnosis), or when
// neither XML shape matches.
func getAllGroups(client *common.ObsClient) ([]string, error) {
	res, err := client.ObsRequest("GET", []string{"group"}, nil)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	data, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	log.Printf("Response status: %d, body length: %d", res.StatusCode, len(data))

	if res.StatusCode != 200 {
		// Truncate the body so a huge error page does not flood the log/error.
		bodyStr := string(data)
		if len(bodyStr) > 500 {
			bodyStr = bodyStr[:500]
		}
		// Error strings are lower-cased per Go convention (staticcheck ST1005).
		return nil, fmt.Errorf("unexpected return code: %d, body: %s", res.StatusCode, bodyStr)
	}

	// Try parsing as the <groups> format.
	// (Renamed from "groupsList", which shadowed the type of the same name.)
	var grouped groupsList
	if err := xml.Unmarshal(data, &grouped); err == nil && len(grouped.Groups) > 0 {
		groupIDs := make([]string, len(grouped.Groups))
		for i, g := range grouped.Groups {
			groupIDs[i] = g.GroupID
		}
		return groupIDs, nil
	}

	// Try parsing as the <directory> format.
	var dir groupsListAlt
	if err := xml.Unmarshal(data, &dir); err == nil && len(dir.Entries) > 0 {
		groupIDs := make([]string, len(dir.Entries))
		for i, e := range dir.Entries {
			groupIDs[i] = e.getName()
		}
		return groupIDs, nil
	}

	// Neither shape matched — log a truncated body to help diagnose.
	bodyStr := string(data)
	if len(bodyStr) > 1000 {
		bodyStr = bodyStr[:1000]
	}
	log.Printf("Failed to parse XML, got: %s", bodyStr)
	return nil, fmt.Errorf("could not parse groups response")
}
// GroupOutput is the JSON document written per group: the group name, its
// member list under "Reviewers", and provenance metadata under "_meta".
type GroupOutput struct {
Meta ImportMeta `json:"_meta,omitempty"`
Name string `json:"Name"`
Reviewers []string `json:"Reviewers"`
Silent bool `json:"Silent,omitempty"`
}
// ImportMeta records where and when a group definition was imported from,
// and whether the resulting file should be treated as read-only.
type ImportMeta struct {
ImportedFrom string `json:"imported_from"`
ReadOnly bool `json:"read_only"`
ImportTime time.Time `json:"import_time"`
}
// sanitizeFilename maps characters that are awkward in file names
// ("/", ":", and spaces) to underscores, so a group ID can safely be
// used as a JSON file name.
func sanitizeFilename(name string) string {
	for _, unsafe := range []string{"/", ":", " "} {
		name = strings.ReplaceAll(name, unsafe, "_")
	}
	return name
}
// processGroup fetches the metadata of a single OBS group and writes it
// as a JSON file named <sanitized groupID>.json inside outputDir, in the
// GroupOutput format. instanceName and importTime are recorded in the
// file's _meta block. Returns a wrapped error if fetching, marshaling,
// or writing fails.
func processGroup(client *common.ObsClient, groupID, outputDir, instanceName string, importTime time.Time) error {
	meta, err := client.GetGroupMeta(groupID)
	if err != nil {
		return fmt.Errorf("fetching group meta: %w", err)
	}
	if meta == nil {
		return fmt.Errorf("group not found")
	}
	common.LogDebug(fmt.Sprintf("Group meta for %s: Title: %s, Persons: %d", groupID, meta.Title, len(meta.Persons.Persons)))

	// The group's member user IDs become the reviewer list.
	members := meta.Persons.Persons
	reviewerIDs := make([]string, 0, len(members))
	for _, person := range members {
		reviewerIDs = append(reviewerIDs, person.UserID)
	}

	record := GroupOutput{
		Meta: ImportMeta{
			ImportedFrom: instanceName,
			ReadOnly:     true,
			ImportTime:   importTime,
		},
		Name:      groupID,
		Reviewers: reviewerIDs,
	}

	targetPath := filepath.Join(outputDir, sanitizeFilename(groupID)+".json")
	encoded, err := json.MarshalIndent(record, "", " ")
	if err != nil {
		return fmt.Errorf("marshaling json: %w", err)
	}
	if err := os.WriteFile(targetPath, encoded, 0644); err != nil {
		return fmt.Errorf("writing file: %w", err)
	}
	common.LogDebug(fmt.Sprintf("Saved group %s to %s", groupID, targetPath))
	return nil
}
// main imports all groups from an OBS instance: it lists every group via
// the API, fetches each group's metadata, and writes one JSON file per
// group into the output directory. Individual group failures are logged
// and counted but do not abort the run.
func main() {
	debugModePtr := flag.Bool("debug", false, "Enable debug output showing API URLs")
	obsInstance := flag.String("instance", "openSUSE", "OBS instance name (used in metadata)")
	obsHost := flag.String("host", "http://localhost:3000", "OBS API host URL")
	outputDir := flag.String("output", "groups", "Output directory for JSON files")
	flag.Parse()

	if *debugModePtr {
		common.SetLoggingLevel(common.LogLevelDebug)
	}
	// The OBS secret token is mandatory; fail fast if it is missing.
	if err := common.RequireObsSecretToken(); err != nil {
		log.Fatal(err)
	}

	log.Printf("Connecting to OBS at %s (instance: %s)", *obsHost, *obsInstance)
	client, err := common.NewObsClient(*obsHost)
	if err != nil {
		log.Fatalf("Failed to create OBS client: %v", err)
	}

	log.Println("Fetching list of all groups...")
	groupIDs, err := getAllGroups(client)
	if err != nil {
		log.Fatalf("Failed to get groups list: %v", err)
	}
	// NOTE: the original code logged this list twice, the second time with
	// a malformed `%s` on a []string — the duplicate line was removed.
	log.Printf("Found %d groups: %v", len(groupIDs), groupIDs)

	if err := os.MkdirAll(*outputDir, 0755); err != nil {
		log.Fatalf("Failed to create output directory: %v", err)
	}

	// One shared import timestamp so all files from this run agree.
	importTime := time.Now()
	successCount := 0
	errorCount := 0
	for i, groupID := range groupIDs {
		log.Printf("[%d/%d] Fetching group: %s", i+1, len(groupIDs), groupID)
		if err := processGroup(client, groupID, *outputDir, *obsInstance, importTime); err != nil {
			log.Printf("Error processing group %s: %v", groupID, err)
			errorCount++
			continue
		}
		successCount++
		// Be gentle with the API: short pause between group fetches.
		time.Sleep(100 * time.Millisecond)
	}

	log.Printf("Done! Success: %d, Errors: %d", successCount, errorCount)
	log.Printf("JSON files saved to: %s", *outputDir)
}

View File

@@ -1,211 +0,0 @@
package main
import (
"encoding/json"
"encoding/xml"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"reflect"
"testing"
"time"
"src.opensuse.org/autogits/common"
)
// TestGroupsListParsing verifies that the <groups> XML listing format is
// unmarshaled into the expected sequence of group IDs.
func TestGroupsListParsing(t *testing.T) {
	// Test <groups> format
	const payload = `<?xml version="1.0" encoding="utf-8"?>
<groups>
<group groupid="group1"/>
<group groupid="group2"/>
<group groupid="group3"/>
</groups>`

	var parsed groupsList
	if err := xml.Unmarshal([]byte(payload), &parsed); err != nil {
		t.Fatalf("Failed to unmarshal groups XML: %v", err)
	}
	if got := len(parsed.Groups); got != 3 {
		t.Errorf("Expected 3 groups, got %d", got)
	}
	want := []string{"group1", "group2", "group3"}
	for i, g := range parsed.Groups {
		if g.GroupID != want[i] {
			t.Errorf("Expected group %s, got %s", want[i], g.GroupID)
		}
	}
}
// TestProcessGroup exercises processGroup end to end: it serves a canned
// group-meta XML document from a mock OBS API, lets processGroup write
// its JSON output into a temp directory, and verifies the file's
// contents field by field against the expected GroupOutput.
func TestProcessGroup(t *testing.T) {
	// 1. Mock the OBS API server for GetGroupMeta
	groupID := "test:group"
	// NOTE(review): the nested <person><person .../></person> wrapper is
	// assumed to mirror the GroupMeta schema expected by
	// common.GetGroupMeta — confirm against the common package.
	mockGroupMetaResponse := `<?xml version="1.0" encoding="utf-8"?>
<group>
<title>Test Group Title</title>
<person>
<person userid="user1" role="maintainer"/>
<person userid="user2" role="reviewer"/>
</person>
</group>`
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Only the group-meta endpoint for our test group is expected;
		// anything else is flagged and answered with 404.
		expectedPath := "/group/" + groupID
		if r.URL.Path != expectedPath {
			t.Errorf("Expected path %s, got %s", expectedPath, r.URL.Path)
			http.NotFound(w, r)
			return
		}
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(mockGroupMetaResponse))
	}))
	defer server.Close()
	// 2. Create a temporary directory for output
	outputDir := t.TempDir()
	// 3. Initialize client pointing to mock server
	client, err := common.NewObsClient(server.URL)
	if err != nil {
		t.Fatalf("Failed to create client: %v", err)
	}
	// 4. Call processGroup
	instanceName := "test-instance"
	importTime := time.Now().UTC().Truncate(time.Second) // Truncate for stable comparison
	err = processGroup(client, groupID, outputDir, instanceName, importTime)
	if err != nil {
		t.Fatalf("processGroup failed: %v", err)
	}
	// 5. Verify the output file
	// The filename must match what processGroup derives via sanitizeFilename.
	expectedFilename := sanitizeFilename(groupID) + ".json"
	filePath := filepath.Join(outputDir, expectedFilename)
	// Check if file exists
	if _, err := os.Stat(filePath); os.IsNotExist(err) {
		t.Fatalf("Expected output file was not created: %s", filePath)
	}
	// Read and verify file content
	data, err := os.ReadFile(filePath)
	if err != nil {
		t.Fatalf("Failed to read output file: %v", err)
	}
	var result GroupOutput
	if err := json.Unmarshal(data, &result); err != nil {
		t.Fatalf("Failed to unmarshal output JSON: %v", err)
	}
	// Assertions
	expectedReviewers := []string{"user1", "user2"}
	expectedOutput := GroupOutput{
		Meta: ImportMeta{
			ImportedFrom: instanceName,
			ReadOnly:     true,
			ImportTime:   importTime,
		},
		Name:      groupID,
		Reviewers: expectedReviewers,
	}
	// Use reflect.DeepEqual for a robust comparison of the structs
	if !reflect.DeepEqual(result, expectedOutput) {
		t.Errorf("Output JSON does not match expected.\nGot: %+v\nWant: %+v", result, expectedOutput)
	}
}
// TestGetAllGroups spins up a mock OBS API server and checks that
// getAllGroups issues GET /group and returns the parsed group IDs.
func TestGetAllGroups(t *testing.T) {
	const payload = `<?xml version="1.0" encoding="utf-8"?>
<groups>
<group groupid="mock-group-1"/>
<group groupid="mock-group-2"/>
</groups>`

	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Verify the request path
		if r.URL.Path != "/group" {
			t.Errorf("Expected path /group, got %s", r.URL.Path)
		}
		// Verify method
		if r.Method != "GET" {
			t.Errorf("Expected method GET, got %s", r.Method)
		}
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(payload))
	}))
	defer srv.Close()

	// Initialize client pointing to mock server
	client, err := common.NewObsClient(srv.URL)
	if err != nil {
		t.Fatalf("Failed to create client: %v", err)
	}

	got, err := getAllGroups(client)
	if err != nil {
		t.Fatalf("GetAllGroups failed: %v", err)
	}
	if len(got) != 2 {
		t.Errorf("Expected 2 groups, got %d", len(got))
	}
	if got[0] != "mock-group-1" {
		t.Errorf("Expected first group to be mock-group-1, got %s", got[0])
	}
}
// TestGroupsListDirectoryFormat verifies that the <directory>/<entry>
// listing format is unmarshaled and that getName resolves each entry's
// group name from the "name" attribute.
func TestGroupsListDirectoryFormat(t *testing.T) {
	// Test <directory> format with name attribute
	const payload = `<?xml version="1.0" encoding="utf-8"?>
<directory>
<entry name="group-a"/>
<entry name="group-b"/>
<entry name="group-c"/>
</directory>`

	var parsed groupsListAlt
	if err := xml.Unmarshal([]byte(payload), &parsed); err != nil {
		t.Fatalf("Failed to unmarshal directory XML: %v", err)
	}
	if got := len(parsed.Entries); got != 3 {
		t.Errorf("Expected 3 entries, got %d", got)
	}
	want := []string{"group-a", "group-b", "group-c"}
	for i, entry := range parsed.Entries {
		if entry.getName() != want[i] {
			t.Errorf("Expected entry %s, got %s", want[i], entry.getName())
		}
	}
}
// TestSanitizeFilename is a table-driven check that unsafe filename
// characters ("/", ":", spaces) are replaced with underscores, including
// edge cases like the empty string and repeated separators.
func TestSanitizeFilename(t *testing.T) {
	cases := []struct {
		in   string
		want string
	}{
		{"simple", "simple"},
		{"group/name", "group_name"},
		{"project:group", "project_group"},
		{"group with spaces", "group_with_spaces"},
		{"group/name:space", "group_name_space"},
		{"", ""},
		{"multiple///slashes", "multiple___slashes"},
	}
	for _, c := range cases {
		if got := sanitizeFilename(c.in); got != c.want {
			t.Errorf("sanitizeFilename(%q) = %q, expected %q", c.in, got, c.want)
		}
	}
}