forked from jzerebecki/autogits
Compare commits
578 Commits
build-dire
...
ibs_state
| Author | SHA256 | Date | |
|---|---|---|---|
| 4da7a62404 | |||
| 1f6ec2a1c3 | |||
| e43833c2ed | |||
| 98d701bfe8 | |||
| 1c63899e49 | |||
| 2c891ded6a | |||
| ef61736d70 | |||
| 57a4860d67 | |||
| 94d83ccf80 | |||
|
|
5440476d10 | ||
| ba20810c99 | |||
|
|
f5a32792e0 | ||
| b4945a8ae4 | |||
| 1451266ddc | |||
| bd618983e9 | |||
| c7840ddd47 | |||
| 3f110ce5f6 | |||
| 3a2c87b4af | |||
| d0056ed461 | |||
| e5e1b5d9a5 | |||
|
|
96a908d0be | ||
|
|
6aaff89179 | ||
| 7ec663db27 | |||
| 3b83ba96e4 | |||
| 3a0445e857 | |||
| ef5db8ca28 | |||
| 10f74f681d | |||
| b514f9784c | |||
| 18435a8820 | |||
| 5f646f4520 | |||
| 88aa8c32fd | |||
| 99d27a48ff | |||
|
|
7832ef90c0 | ||
|
|
4691747038 | ||
| af2ff0bdd2 | |||
| 5669083388 | |||
|
|
cb9131a5dd | ||
| 582df2555b | |||
| 4179fb4c7b | |||
| 7f6bd4bc32 | |||
| f1b807fbf6 | |||
| e5441bf489 | |||
| 8078ca7d4d | |||
| 8721aa2c14 | |||
| 18cb2d7135 | |||
| 40317bf527 | |||
| dc78b352b3 | |||
| b8740047c9 | |||
| 788028a426 | |||
| bb86f377b6 | |||
| b30c393ec0 | |||
| defe379e62 | |||
| 9f405c2022 | |||
| 75760efbc1 | |||
| dd4098cdc6 | |||
| 31299b2d61 | |||
| 0915e6c35f | |||
| 85927ad76d | |||
| 711c2d677a | |||
| 92162f7d89 | |||
| ec0eefb868 | |||
| 5ae2bd8fd7 | |||
| 45f2b55e53 | |||
| c83a3a454f | |||
| 1cf7dd79b3 | |||
| 29607f922c | |||
|
|
bf8d1196ba | ||
| 91d22f7eea | |||
|
|
0456fc114e | ||
|
|
ff68cc8200 | ||
| 913b8c8a4b | |||
| e1825dc658 | |||
| 59965e7b5c | |||
| 24a4a592a7 | |||
| d3d9d66797 | |||
| 7a2f7a6ee7 | |||
| 34a3a4795b | |||
| bb5daebdfa | |||
| 70bba5e239 | |||
| 5793391586 | |||
| d923db3f87 | |||
| fc4547f9a9 | |||
| 6fa57fc4d4 | |||
| 82d4e2ed5d | |||
| 8920644792 | |||
| 06772ca662 | |||
|
|
a814c4ce24 | ||
|
|
aadc7f9d41 | ||
|
|
0598448fdb | ||
|
|
2c174b687a | ||
|
|
4826d0869a | ||
| 643e0d2522 | |||
| 603e5c67e7 | |||
| 17b67b8133 | |||
| 9bc290af01 | |||
| c16d1f9940 | |||
|
|
3e1b3c5c84 | ||
|
|
fc4899b75a | ||
| 0b479bcbfa | |||
| 9f9a4660e9 | |||
| cb2f17a287 | |||
| 3125df4d6a | |||
| 06600813b4 | |||
| 3b510182d6 | |||
|
|
d1bcc222ce | ||
|
|
b632952f62 | ||
|
|
1b90299d94
|
||
| 708add1017 | |||
| 712349d638 | |||
| ba5a42dd29 | |||
| 53cf2c8bad | |||
| 868c28cd5a | |||
| 962c4b2562 | |||
| 57cb251dbc | |||
| 75c4fada50 | |||
| 7d13e586ac | |||
| 7729b845b0 | |||
| c662b2fdbf | |||
|
|
4cedb37da4 | ||
|
|
fe519628c8 | ||
|
|
ff18828692 | ||
| 6337ef7e50 | |||
| e9992d2e99 | |||
| aac218fc6d | |||
| 139f40fce3 | |||
| c44d34fdbe | |||
| 23be3df1fb | |||
| 68b67c6975 | |||
| 478a3a140a | |||
| df4da87bfd | |||
| b19d301d95 | |||
| 9532aa897c | |||
| f942909ac7 | |||
| 7f98298b89 | |||
| c6ee055cb4 | |||
| 58e5547a91 | |||
| c2709e1894 | |||
| 7790e5f301 | |||
| 2620aa3ddd | |||
| 59a47cd542 | |||
| a0c51657d4 | |||
| f0b053ca07 | |||
| 844ec8a87b | |||
| 6ee8fcc597 | |||
| 1220799e57 | |||
| 86a176a785 | |||
| bb9e9a08e5 | |||
| edd8c67fc9 | |||
| 877e93c9bf | |||
| 51403713be | |||
| cc69a9348c | |||
| 5b5bb9a5bc | |||
|
|
2f39fc9836 | ||
| f959684540 | |||
| 18f7ed658a | |||
| c05fa236d1 | |||
| c866303696 | |||
| e806d6ad0d | |||
| abf8aa58fc | |||
| 4f132ec154 | |||
| 86a7fd072e | |||
| 5f5e7d98b5 | |||
| e8738c9585 | |||
| 2f18adaa67 | |||
| b7f5c97de1 | |||
| 09001ce01b | |||
| 37c9cc7a57 | |||
| 362e481a09 | |||
| 38f4c44fd0 | |||
| 605d3dee06 | |||
| 6f26bcdccc | |||
| fffdf4fad3 | |||
| f6d2239f4d | |||
| 913fb7c046 | |||
| 79318dc169 | |||
| 377ed1c37f | |||
| 51b0487b29 | |||
| 49e32c0ab1 | |||
| 01e4f5f59e | |||
| 19d9fc5f1e | |||
| c4e184140a | |||
| 56c492ccdf | |||
| 3a6009a5a3 | |||
| 2c4d25a5eb | |||
| 052ab37412 | |||
| 925f546272 | |||
| 71fd32a707 | |||
| 581131bdc8 | |||
| 495ed349ea | |||
| 350a255d6e | |||
| e3087e46c2 | |||
| ae6b638df6 | |||
| 2c73cc683a | |||
| 32adfb1111 | |||
| fe8fcbae96 | |||
| 5756f7ceea | |||
| 2be0f808d2 | |||
| 7a0f651eaf | |||
| 2e47104b17 | |||
| 76bfa612c5 | |||
| 71aa0813ad | |||
| cc675c1b24 | |||
| 44e4941120 | |||
| 86acfa6871 | |||
| 7f09b2d2d3 | |||
| f3a37f1158 | |||
| 9d6db86318 | |||
| e11993c81f | |||
| 4bd259a2a0 | |||
| 162ae11cdd | |||
| 8431b47322 | |||
| 3ed5ecc3f0 | |||
| d08ab3efd6 | |||
| a4f6628e52 | |||
| 25073dd619 | |||
| 4293181b4e | |||
| 551a4ef577 | |||
| 6afb18fc58 | |||
| f310220261 | |||
| ef7c0c1cea | |||
| 27230fa03b | |||
| c52d40b760 | |||
| d3ba579a8b | |||
| 9ef8209622 | |||
| ba66dd868e | |||
| 17755fa2b5 | |||
| f94d3a8942 | |||
| 20e1109602 | |||
| c25d3be44e | |||
| 8db558891a | |||
| 0e06ba5993 | |||
| 736769d630 | |||
| 93c970d0dd | |||
| 5544a65947 | |||
| 918723d57b | |||
| a418b48809 | |||
|
55846562c1
|
|||
|
95c7770cad
|
|||
|
1b900e3202
|
|||
|
d083acfd1c
|
|||
|
244160e20e
|
|||
| ed2847a2c6 | |||
| 1457caa64b | |||
| b9a38c1724 | |||
| 74edad5d3e | |||
|
|
e5cad365ee
|
||
|
|
53851ba10f
|
||
|
|
056e5208c8
|
||
|
|
af142fdb15
|
||
|
|
5ce92beb52
|
||
|
|
ae379ec408
|
||
| 458837b007 | |||
| a3feab6f7e | |||
| fa647ab2d8 | |||
| 19902813b5 | |||
| 23a7f310c5 | |||
| 58d1f2de91 | |||
| d623844411 | |||
| 04825b552e | |||
| ca7966f3e0 | |||
| 0c47ca4d32 | |||
| 7bad8eb5a9 | |||
| c2c60b77e5 | |||
| 76b5a5dc0d | |||
| 58da491049 | |||
| 626bead304 | |||
| 30bac996f4 | |||
| 9adc718b6f | |||
| 070f45bc25 | |||
| d061f29699 | |||
| f6fd96881d | |||
| 2be785676a | |||
| 1b9ee2d46a | |||
| b7bbafacf8 | |||
| 240896f101 | |||
| a7b326fceb | |||
| 76ed03f86f | |||
| 1af2f53755 | |||
| 0de9071f92 | |||
| 855faea659 | |||
| dbd581ffef | |||
| 1390225614 | |||
| a03491f75c | |||
| 2092fc4f42 | |||
| d2973f4792 | |||
| 58022c6edc | |||
| 994e6b3ca2 | |||
| 6414336ee6 | |||
| 1104581eb6 | |||
| 6ad110e5d3 | |||
| e39ce302b8 | |||
| 3f216dc275 | |||
| 8af7e58534 | |||
| 043673d9ac | |||
| 73737be16a | |||
| 1d3ed81ac5 | |||
| 49c4784e70 | |||
| be15c86973 | |||
| 72857db561 | |||
| faf53aaae2 | |||
| 9e058101f0 | |||
|
|
4ae45d9913
|
||
| 56cf8293ed | |||
| fd5b3598bf | |||
| 9dd5a57b81 | |||
| 1cd385e227 | |||
| 3c20eb567b | |||
| ff7df44d37 | |||
| 1a19873f77 | |||
| 6a09bf021e | |||
| f2089f99fc | |||
| 10ea3a8f8f | |||
| 9faa6ead49 | |||
| 29cce5741a | |||
| 804e542c3f | |||
| 72899162b0 | |||
| 168a419bbe | |||
| 6a71641295 | |||
| 5addde0a71 | |||
| 90ea1c9463 | |||
| a4fb3e6151 | |||
| e2abbfcc63 | |||
| f6cb35acca | |||
| f4386c3d12 | |||
| f8594af8c6 | |||
| b8ef69a5a7 | |||
| c980b9f84d | |||
| 4651440457 | |||
| 7d58882ed8 | |||
| e90ba95869 | |||
| 1015e79026 | |||
| 833cb8b430 | |||
| a882ae283f | |||
| 305e90b254 | |||
| c80683182d | |||
| 51cd4da97b | |||
| cf71fe49d6 | |||
| 85a9a81804 | |||
| 72b979b587 | |||
| bb4350519b | |||
| 62658e23a7 | |||
| 6a1f92af12 | |||
| 24ed21ce7d | |||
| 46a187a60e | |||
| e0c7ea44ea | |||
| f013180c4b | |||
| b96b784b38 | |||
| 6864e95404 | |||
| 0ba4652595 | |||
| 8d0047649a | |||
| 2f180c264e | |||
| 7b87c4fd73 | |||
| 7d2233dd4a | |||
| c30ae5750b | |||
| ea2134c6e9 | |||
| b22f418595 | |||
| c4c9a16e7f | |||
| 5b1e6941c2 | |||
| 923bcd89db | |||
| e96f4d343b | |||
| bcb63fe1e9 | |||
| f4e78e53d3 | |||
| 082db173f3 | |||
| 7e055c3169 | |||
| 7e59e527d8 | |||
| 518845b3d8 | |||
| b091e0e98d | |||
| cedb7c0e76 | |||
| 7209f9f519 | |||
| bd5482d54e | |||
| bc95d50378 | |||
| fff996b497 | |||
| 2b67e6d80e | |||
| 5a875c19a0 | |||
| 538698373a | |||
| 84b8ca65ce | |||
| a02358e641 | |||
| 33c9bffc2e | |||
| 4894c0d90a | |||
| 090c291f8a | |||
| 42cedb6267 | |||
| f7229dfaf9 | |||
|
|
933ca9a3db
|
||
| 390cb89702 | |||
| 6cbeaef6f2 | |||
| d146fb8c4e | |||
| 7e78ee83c1 | |||
| 17e925bfd7 | |||
| 878df15e58 | |||
| c84af6286d | |||
| d2cbb8fd34 | |||
| 8436a49c5d | |||
| 106e36d6bf | |||
| 0ec4986163 | |||
| fb7f6adc98 | |||
| 231f29b065 | |||
| 3f3645a453 | |||
| 42e2713cd8 | |||
| 3d24dce5c0 | |||
| 0cefb45d8a | |||
| ddbb824006 | |||
| 69dac4ec31 | |||
| b7e03ab465 | |||
| 76aec3aabb | |||
| 19fb7e277b | |||
| 51261f1bc1 | |||
| 949810709d | |||
| c012570e89 | |||
| 44a3b15a7d | |||
| c5db1c83a7 | |||
| 9f0909621b | |||
| b3914b04bd | |||
| b43a19189e | |||
| 01b665230e | |||
| 1a07d4c541 | |||
| 22e44dff47 | |||
| f9021d08b9 | |||
| 7a0394e51b | |||
| 518bc15696 | |||
| 51873eb048 | |||
| 4f33ce979c | |||
| 7cc4db2283 | |||
| 4d9e2f8cab | |||
| ed4f27a19e | |||
| e438b5b064 | |||
| 885bb7e537 | |||
| 977d75f6e9 | |||
| 42a9ee48e0 | |||
| 9333e5c3da | |||
| 5e29c88dc8 | |||
| 4f0f101620 | |||
| 253f009da3 | |||
| 5e66a14fa9 | |||
| e79122e494 | |||
| 0b4b1a4e21 | |||
| 0019546e30 | |||
| 6438a8625a | |||
| 3928fa6429 | |||
| e92ac4a592 | |||
| a1520ebfb0 | |||
| c8d65a3ae5 | |||
| b849a72f31 | |||
| 568a2f3df8 | |||
| 30c8b2fe57 | |||
| 69b0f9a5ed | |||
| a283d4f26f | |||
| af898a6b8d | |||
| b89cdb7664 | |||
| d37bfaa9d3 | |||
| 90cca05b31 | |||
| 7c229500c1 | |||
| 290424c4a7 | |||
| 703fa101a4 | |||
| 66e4982e2d | |||
| 09b1c415dd | |||
| 629b941558 | |||
| aa50481c00 | |||
| bc714ee22d | |||
| b8cc0357a7 | |||
| aed0ac3ee9 | |||
| cca3575596 | |||
| 69dcebcf74 | |||
|
e5d07f0ce6
|
|||
|
df9478a920
|
|||
| 7da9daddd5 | |||
| cd0c3bc759 | |||
| af096af507 | |||
| d150c66427 | |||
| 3bef967023 | |||
| 9c3658b33e | |||
| 6968cbc942 | |||
| 2cb7a065a9 | |||
| 35058623a7 | |||
| 24fe165c46 | |||
| 1498438fee | |||
| 4653904ded | |||
| bd87bf8ce3 | |||
| 364c3f4ab7 | |||
| fd8b7f1bee | |||
| da32adb16b | |||
| 1b5a0ad0c8 | |||
| e78fdf4a09 | |||
| 0564a50fb5 | |||
| 4f7db36123 | |||
| 41d536ea1b | |||
| 91d915cc28 | |||
| c7a300119e | |||
| c5c3e1c115 | |||
| c93788d0ee | |||
| 1e46f8d0ab | |||
| 9963ae90ef | |||
| a9225bbd76 | |||
| 801fff6e22 | |||
| b4b0d075be | |||
| 16c2eb7090 | |||
| 3264ad1589 | |||
| cb64635aea | |||
| aeb4c20744 | |||
| da1df24666 | |||
| 6b3c613f14 | |||
| eb997e1ae9 | |||
| f52d72e04a | |||
| 23e2566843 | |||
| 0d0fcef7ac | |||
| 62a597718b | |||
| 327cb4ceaf | |||
| aac475ad16 | |||
| 046a60a6ed | |||
| dcf964bf7a | |||
| bff5f1cab7 | |||
| 6d1ef184e0 | |||
| e30d366f2f | |||
| 4a2fe06f05 | |||
| 210e7588f1 | |||
| 72b100124d | |||
| 996d36aaa8 | |||
| 82b5b105b1 | |||
| 248ec4d03c | |||
| faa21f5453 | |||
| 21c4a7c1e0 | |||
| f3f76e7d5b | |||
| e341b630a2 | |||
| 58532b9b60 | |||
| a697ccd0ca | |||
| 4bafe0b4ef | |||
| 7af2092ae1 | |||
| 32374f76c1 | |||
| 9403b563f6 | |||
| bd492f8d92 | |||
| fbc84d551d | |||
| 874a120f88 | |||
| 199396c210 | |||
| f0de3ad54a | |||
| bfeac63c57 | |||
| d65f37739c | |||
| 5895e3d02c | |||
| 0e036b5ec6 | |||
| 1d1602852c | |||
| 9b5013ee45 | |||
| ed815c3ad1 | |||
| 8645063e8d | |||
| 2d044d5664 | |||
| 51ba81f257 | |||
| bb7a247f66 | |||
| c1f71253a4 | |||
| 96e1c26600 | |||
| 9d9964df11 | |||
| e257b113b9 | |||
| 11e0bbaed1 | |||
| fb430d8c76 | |||
| 7ed2a7082d | |||
| ba7686189e | |||
| 9dcd25b69a | |||
| 881fad36a0 | |||
| 29906e22d2 | |||
| d89c77e22d | |||
| f91c61cd20 | |||
| 06aef50047 | |||
| 52a5cdea94 | |||
| d3f1b36676 | |||
| 5ea5f05b02 | |||
| 5877081280 | |||
| c4ce974ddf | |||
| 65c718e73b | |||
| a8e6c175c0 | |||
| 044416cd2a | |||
| 009cc88d54 | |||
| da1f4f4fa0 | |||
| cfad21e1a3 | |||
| 5eb54d40e0 | |||
| 80ff036acb | |||
|
|
2ed4f0d05f
|
||
|
|
23ed9b830d
|
||
|
|
4604aaeeba
|
||
|
|
2dfe973c51
|
||
| b7625cd4c4 | |||
| 12e7a071d9 | |||
| 6409741a12 |
33
.gitea/workflows/go-generate-check.yaml
Normal file
33
.gitea/workflows/go-generate-check.yaml
Normal file
@@ -0,0 +1,33 @@
|
||||
name: go-generate-check
|
||||
on:
|
||||
push:
|
||||
branches: ['main']
|
||||
paths:
|
||||
- '**.go'
|
||||
- '**.mod'
|
||||
- '**.sum'
|
||||
pull_request:
|
||||
paths:
|
||||
- '**.go'
|
||||
- '**.mod'
|
||||
- '**.sum'
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
go-generate-check:
|
||||
name: go-generate-check
|
||||
container:
|
||||
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
|
||||
steps:
|
||||
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
|
||||
- run: git fetch origin ${{ gitea.ref }}
|
||||
- run: git checkout FETCH_HEAD
|
||||
- run: go generate -C common
|
||||
- run: go generate -C workflow-pr
|
||||
- run: git add -N .; git diff
|
||||
- run: |
|
||||
status=$(git status --short)
|
||||
if [[ -n "$status" ]]; then
|
||||
echo -e "$status"
|
||||
echo "Please commit the differences from running: go generate"
|
||||
false
|
||||
fi
|
||||
24
.gitea/workflows/go-generate-push.yaml
Normal file
24
.gitea/workflows/go-generate-push.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
name: go-generate-push
|
||||
on:
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
go-generate-push:
|
||||
name: go-generate-push
|
||||
container:
|
||||
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
|
||||
steps:
|
||||
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
|
||||
- run: git fetch origin ${{ gitea.ref }}
|
||||
- run: git checkout FETCH_HEAD
|
||||
- run: go generate -C common
|
||||
- run: go generate -C workflow-pr
|
||||
- run: |
|
||||
host=${{ gitea.server_url }}
|
||||
host=${host#https://}
|
||||
echo $host
|
||||
git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
|
||||
git config user.name "Gitea Actions"
|
||||
git config user.email "gitea_noreply@opensuse.org"
|
||||
- run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go generate"; git push origin HEAD:${{ gitea.ref }} )'
|
||||
- run: git log -p FETCH_HEAD...HEAD
|
||||
- run: git log --numstat FETCH_HEAD...HEAD
|
||||
33
.gitea/workflows/go-vendor-check.yaml
Normal file
33
.gitea/workflows/go-vendor-check.yaml
Normal file
@@ -0,0 +1,33 @@
|
||||
name: go-vendor-check
|
||||
on:
|
||||
push:
|
||||
branches: ['main']
|
||||
paths:
|
||||
- '**.mod'
|
||||
- '**.sum'
|
||||
pull_request:
|
||||
paths:
|
||||
- '**.mod'
|
||||
- '**.sum'
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
go-generate-check:
|
||||
name: go-vendor-check
|
||||
container:
|
||||
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
|
||||
steps:
|
||||
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
|
||||
- run: git fetch origin ${{ gitea.ref }}
|
||||
- run: git checkout FETCH_HEAD
|
||||
- run: go mod download
|
||||
- run: go mod vendor
|
||||
- run: go mod verify
|
||||
- run: git add -N .; git diff
|
||||
- run: go mod tidy -diff || true
|
||||
- run: |
|
||||
status=$(git status --short)
|
||||
if [[ -n "$status" ]]; then
|
||||
echo -e "$status"
|
||||
echo "Please commit the differences from running: go generate"
|
||||
false
|
||||
fi
|
||||
26
.gitea/workflows/go-vendor-push.yaml
Normal file
26
.gitea/workflows/go-vendor-push.yaml
Normal file
@@ -0,0 +1,26 @@
|
||||
name: go-generate-push
|
||||
on:
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
go-generate-push:
|
||||
name: go-generate-push
|
||||
container:
|
||||
image: registry.opensuse.org/devel/factory/git-workflow/containers/opensuse/bci/golang-extended:latest
|
||||
steps:
|
||||
- run: git clone --no-checkout --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} .
|
||||
- run: git fetch origin ${{ gitea.ref }}
|
||||
- run: git checkout FETCH_HEAD
|
||||
- run: go mod download
|
||||
- run: go mod vendor
|
||||
- run: go mod verify
|
||||
- run: |
|
||||
host=${{ gitea.server_url }}
|
||||
host=${host#https://}
|
||||
echo $host
|
||||
git remote set-url origin "https://x-access-token:${{ secrets.GITEA_TOKEN }}@$host/${{ gitea.repository }}"
|
||||
git config user.name "Gitea Actions"
|
||||
git config user.email "gitea_noreply@opensuse.org"
|
||||
- run: 'git status --short; git status --porcelain=2|grep --quiet -v . || ( git add .;git commit -m "CI run result of: go mod vendor"; git push origin HEAD:${{ gitea.ref }} )'
|
||||
- run: go mod tidy -diff || true
|
||||
- run: git log -p FETCH_HEAD...HEAD
|
||||
- run: git log --numstat FETCH_HEAD...HEAD
|
||||
52
.gitea/workflows/t.yaml
Normal file
52
.gitea/workflows/t.yaml
Normal file
@@ -0,0 +1,52 @@
|
||||
name: Integration tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ['main']
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
HOME: /var/lib/gitea-runner
|
||||
REPO_URL: http://src.opensuse.org//git-workflow/autogits.git
|
||||
|
||||
jobs:
|
||||
t:
|
||||
runs-on: linux-x86_64
|
||||
steps:
|
||||
- name: whoami
|
||||
run: whoami
|
||||
- name: pwd
|
||||
run: pwd
|
||||
- name: vars
|
||||
run: |
|
||||
set | grep GITEA_
|
||||
- name: Clone
|
||||
run: |
|
||||
git clone -q ${{ env.REPO_URL }}
|
||||
- name: Checkout
|
||||
run: |
|
||||
echo ${{ gitea.ref }}
|
||||
git fetch origin ${{ gitea.ref }}
|
||||
git checkout FETCH_HEAD
|
||||
working-directory: ./autogits
|
||||
- name: Prepare binaries
|
||||
run: make build
|
||||
working-directory: ./autogits
|
||||
- name: Prepare images
|
||||
run: make build
|
||||
working-directory: ./autogits/integration
|
||||
- name: Make sure the pod is down
|
||||
run: make down
|
||||
working-directory: ./autogits/integration
|
||||
- name: Start images
|
||||
run: make up
|
||||
working-directory: ./autogits/integration
|
||||
- name: Run tests
|
||||
run: py.test-3.11 -v tests
|
||||
working-directory: ./autogits/integration
|
||||
- name: Make sure the pod is down
|
||||
if: always()
|
||||
run: make down
|
||||
working-directory: ./autogits/integration
|
||||
|
||||
11
.gitignore
vendored
11
.gitignore
vendored
@@ -1,5 +1,8 @@
|
||||
mock
|
||||
node_modules
|
||||
*.obscpio
|
||||
autogits-tmp.tar.zst
|
||||
*.osc
|
||||
*.conf
|
||||
!/integration/**/*.conf
|
||||
/integration/gitea-data
|
||||
/integration/gitea-logs
|
||||
/integration/rabbitmq-data
|
||||
/integration/workflow-pr-repos
|
||||
__pycache__/
|
||||
|
||||
4
Makefile
Normal file
4
Makefile
Normal file
@@ -0,0 +1,4 @@
|
||||
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
|
||||
|
||||
build:
|
||||
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done
|
||||
19
README.md
19
README.md
@@ -5,11 +5,15 @@ The bots that drive Git Workflow for package management
|
||||
|
||||
* devel-importer -- helper to import an OBS devel project into a Gitea organization
|
||||
* gitea-events-rabbitmq-publisher -- takes all events from a Gitea organization (webhook) and publishes it on a RabbitMQ instance
|
||||
* gitea-status-proxy -- allows bots without code owner permission to set Gitea's commit status
|
||||
* group-review -- group review proxy
|
||||
* hujson -- translates JWCC (json with commas and comments) to Standard JSON
|
||||
* obs-forward-bot -- forwards PR as OBS sr (TODO)
|
||||
* obs-staging-bot -- build bot for a PR
|
||||
* obs-status-service -- report build status of an OBS project as an SVG
|
||||
* workflow-pr -- keeps PR to _ObsPrj consistent with a PR to a package update
|
||||
* workflow-direct -- update _ObsPrj based on direct pushes and repo creations/removals from organization
|
||||
* staging-utils -- review tooling for PR
|
||||
* staging-utils -- review tooling for PR (TODO)
|
||||
- list PR
|
||||
- merge PR
|
||||
- split PR
|
||||
@@ -19,7 +23,18 @@ The bots that drive Git Workflow for package management
|
||||
Bugs
|
||||
----
|
||||
|
||||
Report bugs to issue tracker at https://src.opensuse.org/adamm/autogits
|
||||
Report bugs to issue tracker at https://src.opensuse.org/git-workflow/autogits
|
||||
|
||||
|
||||
Build Status
|
||||
------------
|
||||
|
||||
Devel project build status (`main` branch):
|
||||
|
||||

|
||||
|
||||
`staging` branch build status:
|
||||
|
||||

|
||||
|
||||
|
||||
|
||||
16
_service
16
_service
@@ -1,16 +0,0 @@
|
||||
<services>
|
||||
<!-- workaround, go_modules needs a tar and obs_scm doesn't take file://. -->
|
||||
<service name="roast" mode="manual">
|
||||
<param name="target">.</param>
|
||||
<param name="reproducible">true</param>
|
||||
<param name="outfile">autogits-tmp.tar.zst</param>
|
||||
<param name="exclude">autogits-tmp.tar.zst</param>
|
||||
</service>
|
||||
<service name="go_modules" mode="manual">
|
||||
<param name="basename">./</param>
|
||||
<param name="compression">zst</param>
|
||||
<param name="subdir">gitea-events-rabbitmq-publisher</param>
|
||||
<param name="vendorname">vendor-gitea-events-rabbitmq-publisher</param>
|
||||
</service>
|
||||
</services>
|
||||
|
||||
277
autogits.spec
277
autogits.spec
@@ -17,60 +17,309 @@
|
||||
|
||||
|
||||
Name: autogits
|
||||
Version: 0
|
||||
Version: 1
|
||||
Release: 0
|
||||
Summary: GitWorkflow utilities
|
||||
License: GPL-2.0-or-later
|
||||
URL: https://src.opensuse.org/adamm/autogits
|
||||
Source1: vendor-gitea-events-rabbitmq-publisher.tar.zst
|
||||
BuildRequires: golang-packaging
|
||||
#!RemoteAsset: git+https://src.suse.de/adrianSuSE/autogits#ibs_state
|
||||
Source0: %name-%version.tar.xz
|
||||
BuildRequires: git
|
||||
BuildRequires: systemd-rpm-macros
|
||||
BuildRequires: zstd
|
||||
BuildRequires: go
|
||||
%{?systemd_ordering}
|
||||
|
||||
%description
|
||||
Git Workflow tooling and utilities enabling automated handing of OBS projects
|
||||
as git repositories
|
||||
|
||||
%package -n gitea-events-rabbitmq-publisher
|
||||
|
||||
%package devel-importer
|
||||
Summary: Imports devel projects from obs to git
|
||||
|
||||
%description -n autogits-devel-importer
|
||||
Command-line tool to import devel projects from obs to git
|
||||
|
||||
|
||||
%package doc
|
||||
Summary: Common documentation files
|
||||
BuildArch: noarch
|
||||
|
||||
%description -n autogits-doc
|
||||
Common documentation files
|
||||
|
||||
|
||||
%package gitea-events-rabbitmq-publisher
|
||||
Summary: Publishes Gitea webhook data via RabbitMQ
|
||||
|
||||
%description -n gitea-events-rabbitmq-publisher
|
||||
%description gitea-events-rabbitmq-publisher
|
||||
Listens on an HTTP socket and publishes Gitea events on a RabbitMQ instance
|
||||
with a topic
|
||||
<scope>.src.$organization.$webhook_type.[$webhook_action_type]
|
||||
|
||||
|
||||
%package gitea-status-proxy
|
||||
Summary: Proxy for setting commit status in Gitea
|
||||
|
||||
%description gitea-status-proxy
|
||||
Setting commit status requires code write access token. This proxy
|
||||
is middleware that delegates status setting without access to other APIs
|
||||
|
||||
%package group-review
|
||||
Summary: Reviews of groups defined in ProjectGit
|
||||
|
||||
%description group-review
|
||||
Is used to handle reviews associated with groups defined in the
|
||||
ProjectGit.
|
||||
|
||||
|
||||
%package obs-forward-bot
|
||||
Summary: obs-forward-bot
|
||||
|
||||
%description obs-forward-bot
|
||||
|
||||
|
||||
%package obs-staging-bot
|
||||
Summary: Build a PR against a ProjectGit, if review is requested
|
||||
|
||||
%description obs-staging-bot
|
||||
Build a PR against a ProjectGit, if review is requested.
|
||||
|
||||
|
||||
%package obs-status-service
|
||||
Summary: Reports build status of OBS service as an easily to produce SVG
|
||||
|
||||
%description obs-status-service
|
||||
Reports build status of OBS service as an easily to produce SVG
|
||||
|
||||
|
||||
%package utils
|
||||
Summary: HuJSON to JSON parser
|
||||
Provides: hujson
|
||||
Provides: /usr/bin/hujson
|
||||
|
||||
%description utils
|
||||
HuJSON to JSON parser, using stdin -> stdout pipe
|
||||
|
||||
|
||||
%package workflow-direct
|
||||
Summary: Keep ProjectGit in sync for a devel project
|
||||
Requires: openssh-clients
|
||||
Requires: git-core
|
||||
|
||||
%description workflow-direct
|
||||
Keep ProjectGit in sync with packages in the organization of a devel project
|
||||
|
||||
|
||||
%package workflow-pr
|
||||
Summary: Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
Requires: openssh-clients
|
||||
Requires: git-core
|
||||
|
||||
%description workflow-pr
|
||||
Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
|
||||
|
||||
|
||||
%prep
|
||||
cp -r /home/abuild/rpmbuild/SOURCES/* ./
|
||||
cd gitea-events-rabbitmq-publisher && tar x --zstd -f %{SOURCE1}
|
||||
|
||||
%build
|
||||
go build \
|
||||
-C devel-importer \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C utils/hujson \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C utils/maintainer-update \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C gitea-events-rabbitmq-publisher \
|
||||
-mod=vendor \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C gitea_status_proxy \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C group-review \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C obs-forward-bot \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C obs-staging-bot \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C obs-status-service \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C workflow-direct \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C workflow-pr \
|
||||
-buildmode=pie
|
||||
|
||||
%check
|
||||
go test -C common -v
|
||||
go test -C group-review -v
|
||||
go test -C obs-staging-bot -v
|
||||
go test -C obs-status-service -v
|
||||
go test -C workflow-direct -v
|
||||
go test -C utils/maintainer-update
|
||||
# TODO build fails
|
||||
#go test -C workflow-pr -v
|
||||
|
||||
%install
|
||||
install -D -m0755 devel-importer/devel-importer %{buildroot}%{_bindir}/devel-importer
|
||||
install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher %{buildroot}%{_bindir}/gitea-events-rabbitmq-publisher
|
||||
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
|
||||
install -D -m0755 gitea_status_proxy/gitea_status_proxy %{buildroot}%{_bindir}/gitea_status_proxy
|
||||
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
|
||||
install -D -m0644 systemd/group-review@.service %{buildroot}%{_unitdir}/group-review@.service
|
||||
install -D -m0755 obs-forward-bot/obs-forward-bot %{buildroot}%{_bindir}/obs-forward-bot
|
||||
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
|
||||
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
|
||||
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
|
||||
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
|
||||
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
|
||||
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
|
||||
install -D -m0644 systemd/workflow-direct.target %{buildroot}%{_unitdir}/workflow-direct.target
|
||||
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
|
||||
install -D -m0644 systemd/workflow-pr@.service %{buildroot}%{_unitdir}/workflow-pr@.service
|
||||
install -D -m0644 systemd/workflow-pr.target %{buildroot}%{_unitdir}/workflow-pr.target
|
||||
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
|
||||
install -D -m0755 utils/maintainer-update/maintainer-update %{buildroot}%{_bindir}/maintainer-update
|
||||
|
||||
%pre -n gitea-events-rabbitmq-publisher
|
||||
%pre gitea-events-rabbitmq-publisher
|
||||
%service_add_pre gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%post -n gitea-events-rabbitmq-publisher
|
||||
%post gitea-events-rabbitmq-publisher
|
||||
%service_add_post gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%preun -n gitea-events-rabbitmq-publisher
|
||||
%preun gitea-events-rabbitmq-publisher
|
||||
%service_del_preun gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%postun -n gitea-events-rabbitmq-publisher
|
||||
%postun gitea-events-rabbitmq-publisher
|
||||
%service_del_postun gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%files -n gitea-events-rabbitmq-publisher
|
||||
%pre group-review
|
||||
%service_add_pre group-review@.service
|
||||
|
||||
%post group-review
|
||||
%service_add_post group-review@.service
|
||||
|
||||
%preun group-review
|
||||
%service_del_preun group-review@.service
|
||||
|
||||
%postun group-review
|
||||
%service_del_postun group-review@.service
|
||||
|
||||
%pre obs-staging-bot
|
||||
%service_add_pre obs-staging-bot.service
|
||||
|
||||
%post obs-staging-bot
|
||||
%service_add_post obs-staging-bot.service
|
||||
|
||||
%preun obs-staging-bot
|
||||
%service_del_preun obs-staging-bot.service
|
||||
|
||||
%postun obs-staging-bot
|
||||
%service_del_postun obs-staging-bot.service
|
||||
|
||||
%pre obs-status-service
|
||||
%service_add_pre obs-status-service.service
|
||||
|
||||
%post obs-status-service
|
||||
%service_add_post obs-status-service.service
|
||||
|
||||
%preun obs-status-service
|
||||
%service_del_preun obs-status-service.service
|
||||
|
||||
%postun obs-status-service
|
||||
%service_del_postun obs-status-service.service
|
||||
|
||||
%pre workflow-direct
|
||||
%service_add_pre workflow-direct.target
|
||||
|
||||
%post workflow-direct
|
||||
%service_add_post workflow-direct.target
|
||||
|
||||
%preun workflow-direct
|
||||
%service_del_preun workflow-direct.target
|
||||
|
||||
%postun workflow-direct
|
||||
%service_del_postun workflow-direct.target
|
||||
|
||||
%pre workflow-pr
|
||||
%service_add_pre workflow-pr.target
|
||||
|
||||
%post workflow-pr
|
||||
%service_add_post workflow-pr.target
|
||||
|
||||
%preun workflow-pr
|
||||
%service_del_preun workflow-pr.target
|
||||
|
||||
%postun workflow-pr
|
||||
%service_del_postun workflow-pr.target
|
||||
|
||||
%files devel-importer
|
||||
%license COPYING
|
||||
%doc devel-importer/README.md
|
||||
%{_bindir}/devel-importer
|
||||
|
||||
%files doc
|
||||
%license COPYING
|
||||
%doc doc/README.md
|
||||
%doc doc/workflows.md
|
||||
|
||||
%files gitea-events-rabbitmq-publisher
|
||||
%license COPYING
|
||||
%doc gitea-events-rabbitmq-publisher/README.md
|
||||
%{_bindir}/gitea-events-rabbitmq-publisher
|
||||
%{_unitdir}/gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%changelog
|
||||
%files gitea-status-proxy
|
||||
%license COPYING
|
||||
%{_bindir}/gitea_status_proxy
|
||||
|
||||
%files group-review
|
||||
%license COPYING
|
||||
%doc group-review/README.md
|
||||
%{_bindir}/group-review
|
||||
%{_unitdir}/group-review@.service
|
||||
|
||||
%files obs-forward-bot
|
||||
%license COPYING
|
||||
%{_bindir}/obs-forward-bot
|
||||
|
||||
%files obs-staging-bot
|
||||
%license COPYING
|
||||
%doc obs-staging-bot/README.md
|
||||
%{_bindir}/obs-staging-bot
|
||||
%{_unitdir}/obs-staging-bot.service
|
||||
|
||||
%files obs-status-service
|
||||
%license COPYING
|
||||
%doc obs-status-service/README.md
|
||||
%{_bindir}/obs-status-service
|
||||
%{_unitdir}/obs-status-service.service
|
||||
|
||||
%files utils
|
||||
%license COPYING
|
||||
%{_bindir}/hujson
|
||||
%{_bindir}/maintainer-update
|
||||
|
||||
%files workflow-direct
|
||||
%license COPYING
|
||||
%doc workflow-direct/README.md
|
||||
%{_bindir}/workflow-direct
|
||||
%{_unitdir}/workflow-direct@.service
|
||||
%{_unitdir}/workflow-direct.target
|
||||
|
||||
%files workflow-pr
|
||||
%license COPYING
|
||||
%doc workflow-pr/README.md
|
||||
%{_bindir}/workflow-pr
|
||||
%{_unitdir}/workflow-pr@.service
|
||||
%{_unitdir}/workflow-pr.target
|
||||
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
all: build
|
||||
|
||||
api.json:
|
||||
curl -o api.json https://src.opensuse.org/swagger.v1.json
|
||||
|
||||
gitea-generated/client/gitea_api_client.go:: api.json
|
||||
[ -d gitea-generated ] || mkdir gitea-generated
|
||||
podman run --rm -v $$(pwd):/api ghcr.io/go-swagger/go-swagger generate client -f /api/api.json -t /api/gitea-generated
|
||||
|
||||
api: gitea-generated/client/gitea_api_client.go mock_gitea_utils.go
|
||||
go generate
|
||||
|
||||
build: api
|
||||
go build
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ConfigFile is the top-level bot configuration: the list of project-git
// paths ("Org[/Repo][#branch]") the bots should operate on, parsed from a
// JSON array by ReadConfig.
type ConfigFile struct {
	GitProjectName []string
}

// AutogitConfig is the per-project workflow configuration, unmarshalled
// from the `workflow.config` file in the project git repository.
type AutogitConfig struct {
	Workflows []string // [pr, direct, test]
	Organization string
	GitProjectName string // Organization/GitProjectName.git is PrjGit
	Branch string // branch name of PkgGit that aligns with PrjGit submodules
	Reviewers []string // only used by `pr` workflow
}

// AutogitConfigs is a collection of per-project workflow configurations.
type AutogitConfigs []*AutogitConfig
|
||||
|
||||
func ReadConfig(reader io.Reader) (*ConfigFile, error) {
|
||||
data, err := io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error reading config data: %w", err)
|
||||
}
|
||||
|
||||
config := ConfigFile{}
|
||||
if err := json.Unmarshal(data, &config.GitProjectName); err != nil {
|
||||
return nil, fmt.Errorf("Error parsing Git Project paths: %w", err)
|
||||
}
|
||||
|
||||
return &config, nil
|
||||
}
|
||||
|
||||
func ReadConfigFile(filename string) (*ConfigFile, error) {
|
||||
file, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot open config file for reading. err: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
return ReadConfig(file)
|
||||
}
|
||||
|
||||
// ReadWorkflowConfig fetches and parses `workflow.config` from the project
// git repository identified by git_project ("Org[/Repo][#branch]"). When the
// repo part is omitted, DefaultGitPrj is used; when the branch is omitted,
// the empty string (remote default) is used. On success the returned config
// has GitProjectName normalized to "Org/Repo[#branch]" and Organization
// defaulted to the parsed org when the file did not set one.
func ReadWorkflowConfig(gitea Gitea, git_project string) (*AutogitConfig, error) {
	hash := strings.Split(git_project, "#")
	branch := ""
	if len(hash) > 1 {
		branch = hash[1]
	}

	a := strings.Split(hash[0], "/")
	prjGitRepo := DefaultGitPrj
	switch len(a) {
	case 1:
		// only an organization given -- keep the default repo name
	case 2:
		prjGitRepo = a[1]
	default:
		return nil, fmt.Errorf("Missing org/repo in projectgit: %s", git_project)
	}

	data, _, err := gitea.GetRepositoryFileContent(a[0], prjGitRepo, branch, "workflow.config")
	if err != nil {
		return nil, fmt.Errorf("Error fetching 'workflow.config': %w", err)
	}

	var config AutogitConfig
	if err := json.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("Error parsing config file: %w", err)
	}

	// normalize GitProjectName regardless of what the file contained
	config.GitProjectName = a[0] + "/" + prjGitRepo
	if len(branch) > 0 {
		config.GitProjectName = config.GitProjectName + "#" + branch
	}
	if len(config.Organization) < 1 {
		config.Organization = a[0]
	}
	// NOTE(review): unconditional debug print of the whole config -- consider
	// removing or gating behind a debug flag.
	log.Println(config)
	return &config, nil
}
|
||||
|
||||
func ResolveWorkflowConfigs(gitea Gitea, config *ConfigFile) (AutogitConfigs, error) {
|
||||
configs := make([]*AutogitConfig, 0, len(config.GitProjectName))
|
||||
for _, git_project := range config.GitProjectName {
|
||||
c, err := ReadWorkflowConfig(gitea, git_project)
|
||||
if err != nil {
|
||||
// can't sync, so ignore for now
|
||||
log.Println(err)
|
||||
} else {
|
||||
configs = append(configs, c)
|
||||
}
|
||||
}
|
||||
|
||||
return configs, nil
|
||||
}
|
||||
|
||||
// GetPrjGitConfig returns the first config matching the given organization
// and branch, or nil when none matches.
//
// NOTE(review): the `repo` parameter is currently ignored -- matching is by
// Organization and Branch only. Confirm whether repo should also be compared
// against c.GitProjectName.
func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *AutogitConfig {
	for _, c := range configs {
		if c.Organization == org && c.Branch == branch {
			return c
		}
	}

	return nil
}
|
||||
|
||||
@@ -1,925 +0,0 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=git_utils.go -destination=mock/git_utils.go -typed
|
||||
|
||||
// GitSubmoduleLister enumerates and resolves git submodules of a commit.
type GitSubmoduleLister interface {
	// GitSubmoduleList returns a (path -> commit hash) map of all submodule
	// entries in the top-level tree of commitId.
	GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error)
	// GitSubmoduleCommitId resolves the submodule entry packageName at
	// commitId; valid is false when the entry cannot be resolved.
	GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool)
}

// GitStatusLister reports the working-tree status of a checkout.
type GitStatusLister interface {
	GitStatus(cwd string) ([]GitStatusData, error)
}

// Git is the git-operations interface used by the bots. All `cwd`/`gitDir`
// arguments are relative to the handler's base path (GetPath).
type Git interface {
	GitParseCommits(cwd string, commitIDs []string) (parsedCommits []GitCommit, err error)
	GitCatFile(cwd, commitId, filename string) (data []byte, err error)
	GetPath() string

	GitBranchHead(gitDir, branchName string) (string, error)
	// Close removes the underlying working directory.
	io.Closer

	GitSubmoduleLister
	GitStatusLister

	// command execution helpers; the *OrPanic variants panic on failure
	GitExecWithOutputOrPanic(cwd string, params ...string) string
	GitExecOrPanic(cwd string, params ...string)
	GitExec(cwd string, params ...string) error
	GitExecWithOutput(cwd string, params ...string) (string, error)
}
|
||||
|
||||
// GitHandlerImpl is the default Git implementation, shelling out to
// /usr/bin/git rooted at GitPath.
type GitHandlerImpl struct {
	DebugLogger bool // when true, log every git invocation and its output

	GitPath string // base directory for all git operations
	GitCommiter string // exported as GIT_AUTHOR_NAME / GIT_COMMITTER_NAME
	GitEmail string // NOTE(review): not exported to the git env anywhere visible -- confirm intended use
}

// GetPath returns the handler's base directory.
func (s *GitHandlerImpl) GetPath() string {
	return s.GitPath
}
|
||||
|
||||
// GitHandlerGenerator constructs Git handlers.
type GitHandlerGenerator interface {
	// CreateGitHandler creates a fresh private temp dir named after prjName.
	CreateGitHandler(git_author, email, prjName string) (Git, error)
	// ReadExistingPath wraps an already-existing git working area.
	ReadExistingPath(git_author, email, gitPath string) (Git, error)
}

// GitHandlerGeneratorImpl is the production GitHandlerGenerator.
type GitHandlerGeneratorImpl struct{}
|
||||
|
||||
func (s *GitHandlerGeneratorImpl) CreateGitHandler(git_author, email, prj_name string) (Git, error) {
|
||||
gitPath, err := os.MkdirTemp("", prj_name)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create temp dir: %w", err)
|
||||
}
|
||||
|
||||
if err = os.Chmod(gitPath, 0700); err != nil {
|
||||
return nil, fmt.Errorf("Cannot fix permissions of temp dir: %w", err)
|
||||
}
|
||||
|
||||
return s.ReadExistingPath(git_author, email, gitPath)
|
||||
}
|
||||
|
||||
func (*GitHandlerGeneratorImpl) ReadExistingPath(git_author, email, gitPath string) (Git, error) {
|
||||
git := &GitHandlerImpl{
|
||||
GitCommiter: git_author,
|
||||
GitPath: gitPath,
|
||||
}
|
||||
|
||||
return git, nil
|
||||
}
|
||||
|
||||
//func (h *GitHandler) ProcessBranchList() []string {
|
||||
// if h.HasError() {
|
||||
// return make([]string, 0)
|
||||
// }
|
||||
//
|
||||
// trackedBranches, err := os.ReadFile(path.Join(h.GitPath, DefaultGitPrj, TrackedBranchesFile))
|
||||
// if err != nil {
|
||||
// if errors.Is(err, os.ErrNotExist) {
|
||||
// trackedBranches = []byte("factory")
|
||||
// } else {
|
||||
// h.LogError("file error reading '%s' file in repo", TrackedBranchesFile)
|
||||
// h.Error = err
|
||||
// return make([]string, 0)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// return strings.Split(string(trackedBranches), "\n")
|
||||
//}
|
||||
|
||||
// GitReference is a (branch name, commit id) pair.
type GitReference struct {
	Branch string
	Id string
}

// GitReferences is a deduplicated collection of GitReference values;
// entries are added via addReference.
type GitReferences struct {
	refs []GitReference
}
|
||||
|
||||
func (refs *GitReferences) addReference(id, branch string) {
|
||||
for _, ref := range refs.refs {
|
||||
if ref.Id == id && ref.Branch == branch {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
refs.refs = append(refs.refs, GitReference{Branch: branch, Id: id})
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitBranchHead(gitDir, branchName string) (string, error) {
|
||||
id, err := e.GitExecWithOutput(gitDir, "rev-list", "-1", branchName)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Can't find default remote branch: %s", branchName)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(id), nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) Close() error {
|
||||
if err := os.RemoveAll(e.GitPath); err != nil {
|
||||
return err
|
||||
}
|
||||
e.GitPath = ""
|
||||
return nil
|
||||
}
|
||||
|
||||
// writeFunc adapts a plain function to io.Writer (and io.Closer /
// encoding.TextUnmarshaler), so inline closures can capture process output.
type writeFunc func(data []byte) (int, error)

func (f writeFunc) Write(data []byte) (int, error) {
	return f(data)
}

// UnmarshalText forwards the text to the underlying function.
func (h writeFunc) UnmarshalText(text []byte) error {
	_, err := h.Write(text)
	return err
}

// Close signals end-of-stream by invoking the function with nil.
func (h writeFunc) Close() error {
	_, err := h.Write(nil)
	return err
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExecWithOutputOrPanic(cwd string, params ...string) string {
|
||||
out, err := e.GitExecWithOutput(cwd, params...)
|
||||
if err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExecOrPanic(cwd string, params ...string) {
|
||||
if err := e.GitExec(cwd, params...); err != nil {
|
||||
log.Panicln("git command failed:", params, "@", cwd, "err:", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitExec(cwd string, params ...string) error {
|
||||
_, err := e.GitExecWithOutput(cwd, params...)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExtraGitParams holds additional environment entries appended to every git
// invocation (e.g. credentials for tests); empty by default.
var ExtraGitParams []string

// GitExecWithOutput runs `git params...` in cwd (relative to GitPath) with a
// minimal, fully-controlled environment (no global config, LFS smudge
// disabled, fixed author/committer names) and returns combined
// stdout+stderr. On failure the output is embedded in the returned error.
func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string, error) {
	cmd := exec.Command("/usr/bin/git", params...)
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_CONFIG_GLOBAL=/dev/null",
		"GIT_AUTHOR_NAME=" + e.GitCommiter,
		"GIT_COMMITTER_NAME=" + e.GitCommiter,
		// NOTE(review): this EMAIL value contains two '@' -- presumably an
		// intentionally invalid placeholder; confirm.
		"EMAIL=not@exist@src.opensuse.org",
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
	}
	if len(ExtraGitParams) > 0 {
		cmd.Env = append(cmd.Env, ExtraGitParams...)
	}
	cmd.Dir = filepath.Join(e.GitPath, cwd)
	cmd.Stdin = nil

	if e.DebugLogger {
		log.Printf("git execute: %#v\n", cmd.Args)
	}
	out, err := cmd.CombinedOutput()
	if e.DebugLogger {
		log.Println(string(out))
	}
	if err != nil {
		if e.DebugLogger {
			log.Printf(" *** error: %v\n", err)
		}
		return "", fmt.Errorf("error executing: git %#v \n%s\n err: %w", cmd.Args, out, err)
	}

	return string(out), nil
}
|
||||
|
||||
// ChanIO adapts a byte channel to io.Reader/io.Writer so an exec.Cmd's
// stdin/stdout can be bridged to an in-process parser goroutine.
type ChanIO struct {
	ch chan byte
}

// Write pushes every byte of p into the channel, blocking as needed.
func (c *ChanIO) Write(p []byte) (int, error) {
	for _, b := range p {
		c.ch <- b
	}
	return len(p), nil
}

// read at least 1 byte, but don't block if nothing more in channel
func (c *ChanIO) Read(data []byte) (idx int, err error) {
	var ok bool

	// first byte: block until data is available or the channel is closed
	data[idx], ok = <-c.ch
	if !ok {
		err = io.EOF
		return
	}
	idx++

	// then drain only what is immediately buffered, without blocking
	for len(c.ch) > 0 && idx < len(data) {
		data[idx], ok = <-c.ch
		if !ok {
			err = io.EOF
			return
		}

		idx++
	}

	return
}
|
||||
|
||||
// GitMsg is the parsed header line of a `git cat-file --batch` reply:
// "<hash> <type> <size>".
type GitMsg struct {
	hash string
	itemType string // "commit", "tree", "blob" or "missing"
	size int // object size in bytes
}

// GitCommit is the subset of a parsed commit object used here.
type GitCommit struct {
	Tree string // hash of the commit's top-level tree
	Msg string // full commit message
}

// GitTreeEntry is one entry of a parsed (flat) tree object.
type GitTreeEntry struct {
	name string
	mode int // raw mode bits as stored by git (octal)
	hash string

	size int // number of raw bytes this entry occupied in the tree stream
}

// GitTree is a parsed, non-recursive tree object.
type GitTree struct {
	items []GitTreeEntry
}
|
||||
|
||||
// isSubmodule reports whether the entry is a gitlink (mode 160000).
func (t *GitTreeEntry) isSubmodule() bool {
	return (t.mode & 0170000) == 0160000
}

// isTree reports whether the entry is a sub-tree (mode 040000).
func (t *GitTreeEntry) isTree() bool {
	return (t.mode & 0170000) == 0040000
}

// isBlob reports whether the entry is neither a tree nor a submodule.
func (t *GitTreeEntry) isBlob() bool {
	return !t.isTree() && !t.isSubmodule()
}
|
||||
|
||||
// parseGitMsg parses the "<hash> <type> <size>\x00" header emitted by
// `git cat-file --batch -Z` before each object. For type "missing" (no size
// field) a populated GitMsg is returned together with a non-nil error.
// Reads block on the channel; a closed channel yields zero bytes, which
// fail the character validation below.
func parseGitMsg(data <-chan byte) (GitMsg, error) {
	var id []byte = make([]byte, 64)
	var msgType []byte = make([]byte, 16)
	var size int

	// object hash: lowercase hex digits up to the first space
	pos := 0
	for c := <-data; c != ' '; c = <-data {
		if (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') {
			id[pos] = c
			pos++
		} else {
			return GitMsg{}, fmt.Errorf("Invalid character during object hash parse '%c' at %d", c, pos)
		}
	}
	id = id[:pos]

	// object type: lowercase word ended by space (normal) or NUL (missing)
	pos = 0
	var c byte
	for c = <-data; c != ' ' && c != '\x00'; c = <-data {
		if c >= 'a' && c <= 'z' {
			msgType[pos] = c
			pos++
		} else {
			return GitMsg{}, fmt.Errorf("Invalid character during object type parse '%c' at %d", c, pos)
		}
	}
	msgType = msgType[:pos]

	switch string(msgType) {
	case "commit", "tree", "blob":
		break
	case "missing":
		// "missing" replies end directly with NUL -- no size field follows
		if c != '\x00' {
			return GitMsg{}, fmt.Errorf("Missing format weird")
		}
		return GitMsg{
			hash: string(id[:]),
			itemType: "missing",
			size: 0,
		}, fmt.Errorf("Object not found: '%s'", string(id))
	default:
		return GitMsg{}, fmt.Errorf("Invalid object type: '%s'", string(msgType))
	}

	// decimal object size, terminated by NUL
	for c = <-data; c != '\000'; c = <-data {
		if c >= '0' && c <= '9' {
			size = size*10 + (int(c) - '0')
		} else {
			return GitMsg{}, fmt.Errorf("Invalid character during object size parse: '%c'", c)
		}
	}

	return GitMsg{
		hash: string(id[:]),
		itemType: string(msgType),
		size: size,
	}, nil
}
|
||||
|
||||
// parseGitCommitHdr reads one "key value\n" commit header line from data.
// A lone '\n' (the blank separator before the commit message) yields two
// empty strings, which callers treat as the end-of-headers marker.
func parseGitCommitHdr(data <-chan byte) ([2]string, error) {
	hdr := make([]byte, 0, 60)
	val := make([]byte, 0, 1000)

	c := <-data
	if c != '\n' { // end of header marker
		// key up to the first space, then value up to end of line
		for ; c != ' '; c = <-data {
			hdr = append(hdr, c)
		}
		for c := <-data; c != '\n'; c = <-data {
			val = append(val, c)
		}
	}

	return [2]string{string(hdr), string(val)}, nil
}
|
||||
|
||||
// parseGitCommitMsg reads the NUL-terminated commit message from data and
// verifies it consumed exactly l bytes (the object size remaining after the
// headers, as tracked by parseGitCommit).
func parseGitCommitMsg(data <-chan byte, l int) (string, error) {
	msg := make([]byte, 0, l)

	for c := <-data; c != '\x00'; c = <-data {
		msg = append(msg, c)
		l--
	}
	// l--

	// any leftover (or overshoot) means the headers/message accounting and
	// the object size disagree
	if l != 0 {
		return "", fmt.Errorf("Unexpected data in the git commit msg: l=%d", l)
	}

	return string(msg), nil
}
|
||||
|
||||
func parseGitCommit(data <-chan byte) (GitCommit, error) {
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return GitCommit{}, err
|
||||
} else if hdr.itemType != "commit" {
|
||||
return GitCommit{}, fmt.Errorf("expected commit but parsed %s", hdr.itemType)
|
||||
}
|
||||
|
||||
var c GitCommit
|
||||
l := hdr.size
|
||||
for {
|
||||
hdr, err := parseGitCommitHdr(data)
|
||||
if err != nil {
|
||||
return GitCommit{}, nil
|
||||
}
|
||||
|
||||
if len(hdr[0])+len(hdr[1]) == 0 { // hdr end marker
|
||||
break
|
||||
}
|
||||
|
||||
switch hdr[0] {
|
||||
case "tree":
|
||||
c.Tree = hdr[1]
|
||||
}
|
||||
|
||||
l -= len(hdr[0]) + len(hdr[1]) + 2
|
||||
}
|
||||
l--
|
||||
|
||||
c.Msg, err = parseGitCommitMsg(data, l)
|
||||
return c, err
|
||||
}
|
||||
|
||||
// parseTreeEntry parses one raw tree entry: "<octal mode> <name>\x00"
// followed by hashLen bytes of binary hash (returned hex-encoded).
// e.size accumulates the number of raw bytes consumed so the caller can
// track progress against the tree object's total size.
func parseTreeEntry(data <-chan byte, hashLen int) (GitTreeEntry, error) {
	var e GitTreeEntry

	// octal mode digits up to the separating space
	for c := <-data; c != ' '; c = <-data {
		e.mode = e.mode*8 + int(c-'0')
		e.size++
	}
	e.size++ // the space itself

	// entry name up to NUL
	name := make([]byte, 0, 128)
	for c := <-data; c != '\x00'; c = <-data {
		name = append(name, c)
		e.size++
	}
	e.size++ // the NUL terminator
	e.name = string(name)

	const hexBinToAscii = "0123456789abcdef"

	// binary hash, re-encoded to lowercase hex
	hash := make([]byte, 0, hashLen*2)
	for range hashLen {
		c := <-data
		hash = append(hash, hexBinToAscii[((c&0xF0)>>4)], hexBinToAscii[c&0xF])
	}
	e.hash = string(hash)
	e.size += hashLen

	return e, nil
}
|
||||
|
||||
func parseGitTree(data <-chan byte) (GitTree, error) {
|
||||
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return GitTree{}, err
|
||||
}
|
||||
|
||||
// max capacity to length of hash
|
||||
t := GitTree{items: make([]GitTreeEntry, 0, hdr.size/len(hdr.hash))}
|
||||
parsedLen := 0
|
||||
for parsedLen < hdr.size {
|
||||
entry, err := parseTreeEntry(data, len(hdr.hash)/2)
|
||||
if err != nil {
|
||||
return GitTree{}, nil
|
||||
}
|
||||
|
||||
t.items = append(t.items, entry)
|
||||
parsedLen += entry.size
|
||||
}
|
||||
c := <-data // \0 read
|
||||
|
||||
if c != '\x00' {
|
||||
return t, fmt.Errorf("Unexpected character during git tree data read")
|
||||
}
|
||||
|
||||
if parsedLen != hdr.size {
|
||||
return t, fmt.Errorf("Invalid size of git tree data")
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
|
||||
func parseGitBlob(data <-chan byte) ([]byte, error) {
|
||||
hdr, err := parseGitMsg(data)
|
||||
if err != nil {
|
||||
return []byte{}, err
|
||||
}
|
||||
|
||||
d := make([]byte, hdr.size)
|
||||
for l := 0; l < hdr.size; l++ {
|
||||
d[l] = <-data
|
||||
}
|
||||
eob := <-data
|
||||
if eob != '\x00' {
|
||||
return d, fmt.Errorf("invalid byte read in parseGitBlob")
|
||||
}
|
||||
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// GitParseCommits batch-resolves commitIDs to parsed GitCommit values by
// driving a single `git cat-file --batch -Z` process. A goroutine feeds
// NUL-terminated ids into the child's stdin and parses each reply from its
// stdout; the mutex `done` is used purely as a completion signal so this
// function only returns after the parser goroutine has finished.
func (e *GitHandlerImpl) GitParseCommits(cwd string, commitIDs []string) (parsedCommits []GitCommit, err error) {
	var done sync.Mutex

	done.Lock()
	data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
	parsedCommits = make([]GitCommit, 0, len(commitIDs))

	go func() {
		defer done.Unlock()
		defer close(data_out.ch)

		for _, id := range commitIDs {
			data_out.Write([]byte(id))
			data_out.ch <- '\x00'
			c, e := parseGitCommit(data_in.ch)
			if e != nil {
				// sets the named return; visible to the caller after the
				// second done.Lock() below
				err = fmt.Errorf("Error parsing git commit: %w", e)
				return
			}

			parsedCommits = append(parsedCommits, c)
		}
	}()

	cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_CONFIG_GLOBAL=/dev/null",
	}
	cmd.Dir = filepath.Join(e.GitPath, cwd)
	cmd.Stdout = &data_in
	cmd.Stdin = &data_out
	cmd.Stderr = writeFunc(func(data []byte) (int, error) {
		if e.DebugLogger {
			log.Println(string(data))
		}
		return len(data), nil
	})
	if e.DebugLogger {
		log.Printf("command run: %v\n", cmd.Args)
	}
	// NOTE(review): this assignment may overwrite an error already set by the
	// parser goroutine when cmd.Run itself succeeds -- confirm intended
	// precedence.
	err = cmd.Run()

	done.Lock()
	return
}
|
||||
|
||||
// TODO: support sub-trees
|
||||
// GitCatFile returns the contents of filename from the top-level tree of
// commitId, driving one `git cat-file --batch -Z` process from a parser
// goroutine (commit -> tree -> blob). Sub-trees are not descended into.
// TODO: support sub-trees
func (e *GitHandlerImpl) GitCatFile(cwd, commitId, filename string) (data []byte, err error) {
	var done sync.Mutex

	done.Lock()
	data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}

	go func() {
		defer done.Unlock()
		defer close(data_out.ch)

		data_out.Write([]byte(commitId))
		data_out.ch <- '\x00'
		// NOTE(review): `:=` shadows the named return `err` here -- a commit
		// parse failure is only logged, never reported to the caller.
		c, err := parseGitCommit(data_in.ch)
		if err != nil {
			log.Printf("Error parsing git commit: %v\n", err)
			return
		}
		data_out.Write([]byte(c.Tree))
		data_out.ch <- '\x00'
		tree, err := parseGitTree(data_in.ch)

		if err != nil {
			if e.DebugLogger {
				log.Printf("Error parsing git tree: %v\n", err)
			}
			return
		}

		for _, te := range tree.items {
			if te.isBlob() && te.name == filename {
				data_out.Write([]byte(te.hash))
				data_out.ch <- '\x00'
				data, err = parseGitBlob(data_in.ch)
				return
			}
		}

		// NOTE(review): this assigns the goroutine-scoped (shadowed) err, and
		// the caller's err is overwritten by cmd.Run() below -- so "file not
		// found" is likely never surfaced. Confirm and fix the error plumbing.
		err = fmt.Errorf("file not found: '%s'", filename)
	}()

	cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_CONFIG_GLOBAL=/dev/null",
	}
	cmd.Dir = filepath.Join(e.GitPath, cwd)
	cmd.Stdout = &data_in
	cmd.Stdin = &data_out
	cmd.Stderr = writeFunc(func(data []byte) (int, error) {
		if e.DebugLogger {
			log.Println(string(data))
		}
		return len(data), nil
	})
	if e.DebugLogger {
		log.Printf("command run: %v\n", cmd.Args)
	}
	err = cmd.Run()

	done.Lock()
	return
}
|
||||
|
||||
// return (filename) -> (hash) map for all submodules
|
||||
// TODO: recursive? map different orgs, not just assume '.' for path
|
||||
// return (filename) -> (hash) map for all submodules
// TODO: recursive? map different orgs, not just assume '.' for path
//
// GitSubmoduleList requests commitId and then its tree from one
// `git cat-file --batch -Z` process, collecting every gitlink entry of the
// top-level tree. The mutex `done` is released when the parser goroutine
// finishes, so the function only returns once parsing is complete.
func (e *GitHandlerImpl) GitSubmoduleList(gitPath, commitId string) (submoduleList map[string]string, err error) {
	var done sync.Mutex
	submoduleList = make(map[string]string)

	done.Lock()
	data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}

	go func() {
		defer done.Unlock()
		defer close(data_out.ch)

		data_out.Write([]byte(commitId))
		data_out.ch <- '\x00'
		var c GitCommit
		c, err = parseGitCommit(data_in.ch)
		if err != nil {
			err = fmt.Errorf("Error parsing git commit. Err: %w", err)
			return
		}
		data_out.Write([]byte(c.Tree))
		data_out.ch <- '\x00'
		var tree GitTree
		tree, err = parseGitTree(data_in.ch)

		if err != nil {
			err = fmt.Errorf("Error parsing git tree: %w", err)
			return
		}

		// keep only gitlink (submodule) entries
		for _, te := range tree.items {
			if te.isSubmodule() {
				submoduleList[te.name] = te.hash
			}
		}
	}()

	cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_CONFIG_GLOBAL=/dev/null",
	}
	cmd.Dir = filepath.Join(e.GitPath, gitPath)
	cmd.Stdout = &data_in
	cmd.Stdin = &data_out
	cmd.Stderr = writeFunc(func(data []byte) (int, error) {
		if e.DebugLogger {
			log.Println(string(data))
		}
		return len(data), nil
	})
	if e.DebugLogger {
		log.Printf("command run: %v\n", cmd.Args)
	}
	// NOTE(review): this may overwrite an error set by the parser goroutine
	// when cmd.Run itself succeeds -- confirm intended precedence.
	err = cmd.Run()

	done.Lock()
	return submoduleList, err
}
|
||||
|
||||
// GitSubmoduleCommitId resolves the commit id recorded for submodule
// packageName in commit commitId (commit -> tree -> gitlink entry lookup).
// valid is true only when a hash of the expected length was found.
func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string) (subCommitId string, valid bool) {
	data_in, data_out := ChanIO{make(chan byte, 256)}, ChanIO{make(chan byte, 70)}
	var wg sync.WaitGroup

	wg.Add(1)

	if e.DebugLogger {
		log.Printf("getting commit id '%s' from git at '%s' with packageName: %s\n", commitId, cwd, packageName)
	}

	go func() {
		// The parse steps below panic via log.Panicf on malformed input;
		// recover here and rig the named returns so the final length
		// comparison (and valid) are guaranteed to be false.
		defer func() {
			if recover() != nil {
				subCommitId = "wrong"
				commitId = "ok"
				valid = false
			}
		}()

		defer wg.Done()
		defer close(data_out.ch)

		data_out.Write([]byte(commitId))
		data_out.ch <- '\x00'
		c, err := parseGitCommit(data_in.ch)
		if err != nil {
			log.Panicf("Error parsing git commit: %v\n", err)
		}
		data_out.Write([]byte(c.Tree))
		data_out.ch <- '\x00'
		tree, err := parseGitTree(data_in.ch)

		if err != nil {
			log.Panicf("Error parsing git tree: %v\n", err)
		}

		// find the matching gitlink entry in the top-level tree
		for _, te := range tree.items {
			if te.name == packageName && te.isSubmodule() {
				subCommitId = te.hash
				return
			}
		}
	}()

	cmd := exec.Command("/usr/bin/git", "cat-file", "--batch", "-Z")
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_CONFIG_GLOBAL=/dev/null",
	}
	cmd.Dir = filepath.Join(e.GitPath, cwd)
	cmd.Stdout = &data_in
	cmd.Stdin = &data_out
	cmd.Stderr = writeFunc(func(data []byte) (int, error) {
		log.Println(string(data))
		return len(data), nil
	})
	if e.DebugLogger {
		log.Printf("command run: %v\n", cmd.Args)
	}
	if err := cmd.Run(); err != nil {
		log.Printf("Error running command %v, err: %v", cmd.Args, err)
	}

	wg.Wait()
	// valid when the found hash is as long as the queried id; a no-match
	// leaves subCommitId empty, and the recover above forces a mismatch
	return subCommitId, len(subCommitId) == len(commitId)
}
|
||||
|
||||
// GitStatusData.Status values, one per `git status --porcelain=2` entry kind.
const (
	GitStatus_Untracked = 0
	GitStatus_Modified = 1
	GitStatus_Ignored = 2
	GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
	GitStatus_Renamed = 4 // orig name in States[0]
)

// GitStatusData is one parsed entry of `git status --porcelain=2 -z`.
type GitStatusData struct {
	Path string
	Status int
	// States carries extra per-entry data: the original name for renames,
	// or the three merge-stage values for unmerged entries.
	States [3]string
}
|
||||
|
||||
func parseGitStatusHexString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 32)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
switch {
|
||||
case c == 0 || c == ' ':
|
||||
return string(str), nil
|
||||
case c >= 'a' && c <= 'f':
|
||||
case c >= 'A' && c <= 'F':
|
||||
case c >= '0' && c <= '9':
|
||||
default:
|
||||
return "", errors.New("Invalid character in hex string:" + string(c))
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
func parseGitStatusString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
|
||||
for skipSpaceLen > 0 {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c == ' ' {
|
||||
skipSpaceLen--
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseSingleStatusEntry parses one `git status --porcelain=2 -z` record.
// The leading byte selects the record kind ('1' changed, '2' renamed/copied,
// '?' untracked, '!' ignored, 'u' unmerged); the fixed space-separated
// fields of each kind are skipped and only the path(s)/stage data retained.
// A (nil, nil) return signals clean EOF at a record boundary.
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
	ret := GitStatusData{}
	statusType, err := data.ReadByte()
	if err != nil {
		// EOF at an entry boundary -- not an error
		return nil, nil
	}
	switch statusType {
	case '1':
		// ordinary changed entry: 8 fields precede the path
		var err error
		if err = skipGitStatusEntry(data, 8); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Modified
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '2':
		// rename/copy: 9 fields, then new path, then original path
		var err error
		if err = skipGitStatusEntry(data, 9); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Renamed
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
		ret.States[0], err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '?':
		// untracked: single separator, then the path
		var err error
		if err = skipGitStatusEntry(data, 1); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Untracked
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case '!':
		// ignored: single separator, then the path
		var err error
		if err = skipGitStatusEntry(data, 1); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Ignored
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	case 'u':
		// unmerged: 7 fields, then the three merge-stage values, then path
		var err error
		if err = skipGitStatusEntry(data, 7); err != nil {
			return nil, err
		}
		if ret.States[0], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		if ret.States[1], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		if ret.States[2], err = parseGitStatusHexString(data); err != nil {
			return nil, err
		}
		ret.Status = GitStatus_Unmerged
		ret.Path, err = parseGitStatusString(data)
		if err != nil {
			return nil, err
		}
	default:
		return nil, errors.New("Invalid status type" + string(statusType))
	}
	return &ret, nil
}
|
||||
|
||||
func parseGitStatusData(data io.ByteReader) ([]GitStatusData, error) {
|
||||
ret := make([]GitStatusData, 0, 10)
|
||||
for {
|
||||
data, err := parseSingleStatusEntry(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if data == nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = append(ret, *data)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// GitStatus runs `git status --porcelain=2 -z` in cwd (relative to GitPath)
// and parses its NUL-separated output into GitStatusData entries.
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
	if e.DebugLogger {
		log.Println("getting git-status()")
	}

	cmd := exec.Command("/usr/bin/git", "status", "--porcelain=2", "-z")
	cmd.Env = []string{
		"GIT_CEILING_DIRECTORIES=" + e.GitPath,
		"GIT_LFS_SKIP_SMUDGE=1",
		"GIT_CONFIG_GLOBAL=/dev/null",
	}
	cmd.Dir = filepath.Join(e.GitPath, cwd)
	cmd.Stderr = writeFunc(func(data []byte) (int, error) {
		log.Println(string(data))
		return len(data), nil
	})
	if e.DebugLogger {
		log.Printf("command run: %v\n", cmd.Args)
	}
	out, err := cmd.Output()
	if err != nil {
		// NOTE(review): a failed git run is only logged, and parsing proceeds
		// on whatever partial output exists -- confirm this is intended.
		log.Printf("Error running command %v, err: %v", cmd.Args, err)
	}

	return parseGitStatusData(bufio.NewReader(bytes.NewReader(out)))
}
|
||||
@@ -1,104 +0,0 @@
|
||||
// Code generated by go-swagger; DO NOT EDIT.
|
||||
|
||||
package issue
|
||||
|
||||
// This file was generated by the swagger tool.
|
||||
// Editing this file might prove futile when you re-run the swagger generate command
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/go-openapi/runtime"
|
||||
"github.com/go-openapi/strfmt"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
// IssueSearchIssuesReader is a Reader for the IssueSearchIssues structure.
// NOTE: this file is go-swagger generated ("DO NOT EDIT") — changes here will
// be clobbered on regeneration; fix the swagger spec/template instead.
type IssueSearchIssuesReader struct {
	// formats carries the strfmt registry used to parse typed response fields.
	formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
func (o *IssueSearchIssuesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {
	case 200:
		result := NewIssueSearchIssuesOK()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil
	default:
		// Any non-200 status is surfaced as a generic API error.
		return nil, runtime.NewAPIError("[GET /repos/issues/search] issueSearchIssues", response, response.Code())
	}
}

// NewIssueSearchIssuesOK creates a IssueSearchIssuesOK with default headers values
func NewIssueSearchIssuesOK() *IssueSearchIssuesOK {
	return &IssueSearchIssuesOK{}
}

/*
IssueSearchIssuesOK describes a response with status code 200, with default header values.

IssueList
*/
type IssueSearchIssuesOK struct {
	Payload []*models.Issue
}

// IsSuccess returns true when this issue search issues o k response has a 2xx status code
func (o *IssueSearchIssuesOK) IsSuccess() bool {
	return true
}

// IsRedirect returns true when this issue search issues o k response has a 3xx status code
func (o *IssueSearchIssuesOK) IsRedirect() bool {
	return false
}

// IsClientError returns true when this issue search issues o k response has a 4xx status code
func (o *IssueSearchIssuesOK) IsClientError() bool {
	return false
}

// IsServerError returns true when this issue search issues o k response has a 5xx status code
func (o *IssueSearchIssuesOK) IsServerError() bool {
	return false
}

// IsCode returns true when this issue search issues o k response a status code equal to that given
func (o *IssueSearchIssuesOK) IsCode(code int) bool {
	return code == 200
}

// Code gets the status code for the issue search issues o k response
func (o *IssueSearchIssuesOK) Code() int {
	return 200
}

// Error renders the response (including its JSON payload) for error reporting.
func (o *IssueSearchIssuesOK) Error() string {
	payload, _ := json.Marshal(o.Payload)
	return fmt.Sprintf("[GET /repos/issues/search][%d] issueSearchIssuesOK %s", 200, payload)
}

// String renders the response identically to Error.
func (o *IssueSearchIssuesOK) String() string {
	payload, _ := json.Marshal(o.Payload)
	return fmt.Sprintf("[GET /repos/issues/search][%d] issueSearchIssuesOK %s", 200, payload)
}

// GetPayload returns the decoded issue list.
func (o *IssueSearchIssuesOK) GetPayload() []*models.Issue {
	return o.Payload
}

// readResponse deserializes the response body into o.Payload; io.EOF from an
// empty body is tolerated.
func (o *IssueSearchIssuesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	// response payload
	if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
|
||||
@@ -1,50 +0,0 @@
|
||||
// Code generated by go-swagger; DO NOT EDIT.
|
||||
|
||||
package models
|
||||
|
||||
// This file was generated by the swagger tool.
|
||||
// Editing this file might prove futile when you re-run the swagger generate command
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/go-openapi/strfmt"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// AddCollaboratorOption AddCollaboratorOption options when adding a user as a collaborator of a repository
//
// NOTE: go-swagger generated ("DO NOT EDIT") — regeneration overwrites edits.
//
// swagger:model AddCollaboratorOption
type AddCollaboratorOption struct {

	// permission
	Permission string `json:"permission,omitempty"`
}

// Validate validates this add collaborator option
func (m *AddCollaboratorOption) Validate(formats strfmt.Registry) error {
	// No constrained fields; the model is always valid.
	return nil
}

// ContextValidate validates this add collaborator option based on context it is used
func (m *AddCollaboratorOption) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	// No context-validatable fields; the model is always valid.
	return nil
}

// MarshalBinary interface implementation
func (m *AddCollaboratorOption) MarshalBinary() ([]byte, error) {
	if m == nil {
		return nil, nil
	}
	return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *AddCollaboratorOption) UnmarshalBinary(b []byte) error {
	var res AddCollaboratorOption
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
|
||||
@@ -1,586 +0,0 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
transport "github.com/go-openapi/runtime/client"
|
||||
"github.com/go-openapi/strfmt"
|
||||
apiclient "src.opensuse.org/autogits/common/gitea-generated/client"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/notification"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/organization"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/user"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=gitea_utils.go -destination=mock/gitea_utils.go -typed
|
||||
|
||||
// maintainer list file in ProjectGit
const (
	// MaintainershipFile is the project-wide maintainership JSON file.
	MaintainershipFile = "_maintainership.json"
	// MaintainershipDir holds per-package maintainership files.
	MaintainershipDir = "maintainership"
)

const (
	// from Gitea
	// ReviewStateApproved pr is approved
	ReviewStateApproved models.ReviewStateType = "APPROVED"
	// ReviewStatePending pr state is pending
	ReviewStatePending models.ReviewStateType = "PENDING"
	// ReviewStateComment is a comment review
	ReviewStateComment models.ReviewStateType = "COMMENT"
	// ReviewStateRequestChanges changes for pr are requested
	ReviewStateRequestChanges models.ReviewStateType = "REQUEST_CHANGES"
	// ReviewStateRequestReview review is requested from user
	ReviewStateRequestReview models.ReviewStateType = "REQUEST_REVIEW"
	// ReviewStateUnknown state of pr is unknown
	ReviewStateUnknown models.ReviewStateType = ""
)

// GiteaMaintainershipReader fetches maintainership metadata from a project git.
type GiteaMaintainershipReader interface {
	FetchMaintainershipFile(org, prjGit, branch string) ([]byte, string, error)
	FetchMaintainershipDirFile(org, prjGit, branch, pkg string) ([]byte, string, error)
}

// GiteaPRFetcher looks up pull requests, directly or via a PrjGit reference.
type GiteaPRFetcher interface {
	GetPullRequest(org, project string, num int64) (*models.PullRequest, error)
	GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error)
}

// GiteaReviewFetcher lists the reviews attached to a pull request.
type GiteaReviewFetcher interface {
	GetPullRequestReviews(org, project string, PRnum int64) ([]*models.PullReview, error)
}

// GiteaPRChecker combines review fetching with maintainership lookup.
type GiteaPRChecker interface {
	GiteaReviewFetcher
	GiteaMaintainershipReader
}

// GiteaReviewFetcherAndRequester can both list and request reviews.
type GiteaReviewFetcherAndRequester interface {
	GiteaReviewFetcher
	GiteaReviewRequester
}

// GiteaReviewRequester asks users to review a pull request.
type GiteaReviewRequester interface {
	RequestReviews(pr *models.PullRequest, reviewer ...string) ([]*models.PullReview, error)
}

// GiteaReviewer posts a review (approval/rejection/comment) on a pull request.
type GiteaReviewer interface {
	AddReviewComment(pr *models.PullRequest, state models.ReviewStateType, comment string) (*models.PullReview, error)
}

// GiteaRepoFetcher resolves repository metadata.
type GiteaRepoFetcher interface {
	GetRepository(org, repo string) (*models.Repository, error)
}

// Gitea is the complete API surface the bots use; GiteaTransport is the
// concrete implementation backed by the generated swagger client.
type Gitea interface {
	GiteaRepoFetcher
	GiteaReviewRequester
	GiteaReviewer
	GiteaPRFetcher
	GiteaReviewFetcher
	GiteaMaintainershipReader

	GetPullNotifications(since *time.Time) ([]*models.NotificationThread, error)
	SetNotificationRead(notificationId int64) error
	GetOrganization(orgName string) (*models.Organization, error)
	GetOrganizationRepositories(orgName string) ([]*models.Repository, error)
	CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error)
	CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error)
	GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error)
	GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error)
	GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error)
	GetRecentPullRequests(org, repo string) ([]*models.PullRequest, error)
	GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error)

	GetCurrentUser() (*models.User, error)
}

// GiteaTransport implements Gitea on top of the generated swagger client.
type GiteaTransport struct {
	transport *transport.Runtime
	client    *apiclient.GiteaAPI
}
|
||||
|
||||
func AllocateGiteaTransport(host string) Gitea {
|
||||
var r GiteaTransport
|
||||
|
||||
r.transport = transport.New(host, apiclient.DefaultBasePath, [](string){"https"})
|
||||
r.transport.DefaultAuthentication = transport.BearerToken(giteaToken)
|
||||
|
||||
r.client = apiclient.New(r.transport, nil)
|
||||
|
||||
return &r
|
||||
}
|
||||
|
||||
// FetchMaintainershipFile returns the project-wide maintainership JSON
// (_maintainership.json) from the given branch of org/repo, plus its blob SHA.
func (gitea *GiteaTransport) FetchMaintainershipFile(org, repo, branch string) ([]byte, string, error) {
	return gitea.GetRepositoryFileContent(org, repo, branch, MaintainershipFile)
}

// FetchMaintainershipDirFile returns the per-package maintainership file
// (maintainership/<pkg>) from the given branch of org/repo, plus its blob SHA.
func (gitea *GiteaTransport) FetchMaintainershipDirFile(org, repo, branch, pkg string) ([]byte, string, error) {
	return gitea.GetRepositoryFileContent(org, repo, branch, path.Join(MaintainershipDir, pkg))
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullRequest(org, project string, num int64) (*models.PullRequest, error) {
|
||||
pr, err := gitea.client.Repository.RepoGetPullRequest(
|
||||
repository.NewRepoGetPullRequestParams().
|
||||
WithDefaults().
|
||||
WithOwner(org).
|
||||
WithRepo(project).
|
||||
WithIndex(num),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
return pr.Payload, err
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRepository(org, pkg string) (*models.Repository, error) {
|
||||
repo, err := gitea.client.Repository.RepoGet(repository.NewRepoGetParams().WithDefaults().WithOwner(org).WithRepo(pkg), gitea.transport.DefaultAuthentication)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return repo.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullRequestReviews(org, project string, PRnum int64) ([]*models.PullReview, error) {
|
||||
limit := int64(20)
|
||||
var page int64
|
||||
var allReviews []*models.PullReview
|
||||
|
||||
for {
|
||||
reviews, err := gitea.client.Repository.RepoListPullReviews(
|
||||
repository.NewRepoListPullReviewsParams().
|
||||
WithDefaults().
|
||||
WithOwner(org).
|
||||
WithRepo(project).
|
||||
WithIndex(PRnum).
|
||||
WithPage(&page).
|
||||
WithLimit(&limit),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
allReviews = slices.Concat(allReviews, reviews.Payload)
|
||||
if len(reviews.Payload) < int(limit) {
|
||||
break
|
||||
}
|
||||
page++
|
||||
}
|
||||
|
||||
return allReviews, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetPullNotifications(since *time.Time) ([]*models.NotificationThread, error) {
|
||||
bigLimit := int64(100000)
|
||||
|
||||
params := notification.NewNotifyGetListParams().
|
||||
WithDefaults().
|
||||
WithSubjectType([]string{"Pull"}).
|
||||
WithStatusTypes([]string{"unread"}).
|
||||
WithLimit(&bigLimit)
|
||||
|
||||
if since != nil {
|
||||
s := strfmt.DateTime(*since)
|
||||
params.SetSince(&s)
|
||||
}
|
||||
|
||||
list, err := gitea.client.Notification.NotifyGetList(params, gitea.transport.DefaultAuthentication)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return list.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) SetNotificationRead(notificationId int64) error {
|
||||
_, err := gitea.client.Notification.NotifyReadThread(
|
||||
notification.NewNotifyReadThreadParams().
|
||||
WithDefaults().
|
||||
WithID(fmt.Sprint(notificationId)),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error setting notification: %d. Err: %w", notificationId, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetOrganization(orgName string) (*models.Organization, error) {
|
||||
org, err := gitea.client.Organization.OrgGet(
|
||||
organization.NewOrgGetParams().WithOrg(orgName),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error fetching org: '%s' data. Err: %w", orgName, err)
|
||||
}
|
||||
return org.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetOrganizationRepositories(orgName string) ([]*models.Repository, error) {
|
||||
var page int64
|
||||
repos := make([]*models.Repository, 0, 100)
|
||||
|
||||
page = 1
|
||||
for {
|
||||
ret, err := gitea.client.Organization.OrgListRepos(
|
||||
organization.NewOrgListReposParams().WithOrg(orgName).WithPage(&page),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error retrieving repository list for org: '%s'. Err: %w", orgName, err)
|
||||
}
|
||||
|
||||
if len(ret.Payload) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
repos = append(repos, ret.Payload...)
|
||||
page++
|
||||
}
|
||||
|
||||
return repos, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error) {
|
||||
repo, err := gitea.client.Repository.RepoGet(
|
||||
repository.NewRepoGetParams().WithDefaults().WithOwner(org).WithRepo(repoName),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
switch err.(type) {
|
||||
case *repository.RepoGetNotFound:
|
||||
repo, err := gitea.client.Organization.CreateOrgRepo(
|
||||
organization.NewCreateOrgRepoParams().WithDefaults().WithBody(
|
||||
&models.CreateRepoOption{
|
||||
AutoInit: false,
|
||||
Name: &repoName,
|
||||
ObjectFormatName: models.CreateRepoOptionObjectFormatNameSha256,
|
||||
},
|
||||
).WithOrg(org),
|
||||
nil,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
switch err.(type) {
|
||||
case *organization.CreateOrgRepoCreated:
|
||||
// weird, but ok, repo created
|
||||
default:
|
||||
return nil, fmt.Errorf("error creating repo '%s' under '%s'. Err: %w", repoName, org, err)
|
||||
}
|
||||
}
|
||||
|
||||
// initialize repository
|
||||
if err = os.Mkdir(filepath.Join(git.GetPath(), DefaultGitPrj), 0700); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "init", "--object-format="+repo.Payload.ObjectFormatName); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "checkout", "-b", repo.Payload.DefaultBranch); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
readmeFilename := filepath.Join(git.GetPath(), DefaultGitPrj, "README.md")
|
||||
{
|
||||
file, _ := os.Create(readmeFilename)
|
||||
defer file.Close()
|
||||
|
||||
io.WriteString(file, ReadmeBoilerplate)
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "add", "README.md"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "commit", "-m", "Automatic devel project creation"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = git.GitExec(DefaultGitPrj, "remote", "add", "origin", repo.Payload.SSHURL); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return repo.Payload, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("cannot fetch repo data for %s/%s: %w", org, repoName, err)
|
||||
}
|
||||
}
|
||||
|
||||
return repo.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
|
||||
prOptions := models.CreatePullRequestOption{
|
||||
Base: repo.DefaultBranch,
|
||||
Head: srcId,
|
||||
Title: title,
|
||||
Body: body,
|
||||
}
|
||||
|
||||
if pr, err := gitea.client.Repository.RepoGetPullRequestByBaseHead(
|
||||
repository.NewRepoGetPullRequestByBaseHeadParams().WithOwner(repo.Owner.UserName).WithRepo(repo.Name).WithBase(repo.DefaultBranch).WithHead(srcId),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
); err == nil {
|
||||
return pr.Payload, nil
|
||||
}
|
||||
|
||||
pr, err := gitea.client.Repository.RepoCreatePullRequest(
|
||||
repository.
|
||||
NewRepoCreatePullRequestParams().
|
||||
WithDefaults().
|
||||
WithOwner(repo.Owner.UserName).
|
||||
WithRepo(repo.Name).
|
||||
WithBody(&prOptions),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create pull request. %w", err)
|
||||
}
|
||||
|
||||
return pr.GetPayload(), nil
|
||||
}
|
||||
|
||||
// GetAssociatedPrjGitPR scans the open pull requests of prjGitOrg/prjGitRepo
// for one whose description contains a line matching PrPattern rendered with
// refOrg, refRepo and Index, and returns it. Returns (nil, nil) when no such
// PR is found.
func (gitea *GiteaTransport) GetAssociatedPrjGitPR(prjGitOrg, prjGitRepo, refOrg, refRepo string, Index int64) (*models.PullRequest, error) {
	var page int64
	state := "open"
	for {
		page++
		prs, err := gitea.client.Repository.RepoListPullRequests(
			repository.
				NewRepoListPullRequestsParams().
				WithDefaults().
				WithOwner(prjGitOrg).
				WithRepo(prjGitRepo).
				WithState(&state).
				WithPage(&page),
			gitea.transport.DefaultAuthentication)

		if err != nil {
			return nil, fmt.Errorf("cannot fetch PR list for %s / %s : %w", prjGitOrg, prjGitRepo, err)
		}

		// The reference line to look for, built from PrPattern.
		prLine := fmt.Sprintf(PrPattern, refOrg, refRepo, Index)

		// payload_processing:
		for _, pr := range prs.Payload {
			lines := strings.Split(pr.Body, "\n")

			for _, line := range lines {
				if strings.TrimSpace(line) == prLine {
					return pr, nil
				}
			}
		}

		// NOTE(review): assumes the server page size is 10 — if the default
		// page size differs, pagination could stop early. TODO confirm.
		if len(prs.Payload) < 10 {
			break
		}
	}

	return nil, nil
}
|
||||
|
||||
func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewers ...string) ([]*models.PullReview, error) {
|
||||
reviewOptions := models.PullReviewRequestOptions{
|
||||
Reviewers: reviewers,
|
||||
}
|
||||
|
||||
review, err := gitea.client.Repository.RepoCreatePullReviewRequests(
|
||||
repository.
|
||||
NewRepoCreatePullReviewRequestsParams().
|
||||
WithOwner(pr.Base.Repo.Owner.UserName).
|
||||
WithRepo(pr.Base.Repo.Name).
|
||||
WithIndex(pr.Index).
|
||||
WithBody(&reviewOptions),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create pull request reviews: %w", err)
|
||||
}
|
||||
|
||||
return review.GetPayload(), nil
|
||||
}
|
||||
|
||||
// AddReviewComment submits a review on pr with the given state (approve,
// request changes, comment, ...) and body text, returning the created review.
func (gitea *GiteaTransport) AddReviewComment(pr *models.PullRequest, state models.ReviewStateType, comment string) (*models.PullReview, error) {
	c, err := gitea.client.Repository.RepoCreatePullReview(
		repository.NewRepoCreatePullReviewParams().
			WithDefaults().
			WithOwner(pr.Base.Repo.Owner.UserName).
			WithRepo(pr.Base.Repo.Name).
			WithIndex(pr.Index).
			WithBody(&models.CreatePullReviewOptions{
				Event: state,
				Body:  comment,
			}),
		gitea.transport.DefaultAuthentication,
	)
	// Alternative implementations kept for reference:
	/*
		c, err := client.Repository.RepoSubmitPullReview(
			repository.NewRepoSubmitPullReviewParams().
				WithDefaults().
				WithOwner(pr.Base.Repo.Owner.UserName).
				WithRepo(pr.Base.Repo.Name).
				WithIndex(pr.Index).
				WithID(review.ID).
				WithBody(&models.SubmitPullReviewOptions{
					Event: state,
					Body:  comment,
				}),
			transport.DefaultAuthentication,
		)
	*/

	/* c, err := client.Issue.IssueCreateComment(
	issue.NewIssueCreateCommentParams().
		WithDefaults().
		WithOwner(pr.Base.Repo.Owner.UserName).
		WithRepo(pr.Base.Repo.Name).
		WithIndex(pr.Index).
		WithBody(&models.CreateIssueCommentOption{
			Body: &comment,
		}),
	transport.DefaultAuthentication)
	*/
	if err != nil {
		return nil, err
	}

	return c.Payload, nil
}
|
||||
|
||||
// GetRepositoryFileContent downloads a single file from org/repo at the given
// ref (hash may be a branch, tag or commit SHA; empty selects the server's
// default ref) and returns its decoded bytes together with the blob SHA.
// Files larger than ~10 MB are rejected to bound memory use.
func (gitea *GiteaTransport) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
	params := repository.NewRepoGetContentsParams().WithOwner(org).WithRepo(repo).WithFilepath(path)
	if len(hash) > 0 {
		params = params.WithRef(&hash)
	}
	content, err := gitea.client.Repository.RepoGetContents(params,
		gitea.transport.DefaultAuthentication,
	)

	if err != nil {
		return nil, "", err
	}
	// Only base64-encoded payloads are handled.
	if content.Payload.Encoding != "base64" {
		return nil, "", fmt.Errorf("Unhandled content encoding: %s", content.Payload.Encoding)
	}

	if content.Payload.Size > 10000000 {
		return nil, "", fmt.Errorf("Content length is too large for %s/%s/%s#%s - %d bytes", org, repo, path, hash, content.Payload.Size)
	}

	data := make([]byte, content.Payload.Size)
	// NOTE(review): RawStdEncoding rejects '='-padded input — confirm the
	// server returns unpadded base64, otherwise this decode fails.
	n, err := base64.RawStdEncoding.Decode(data, []byte(content.Payload.Content))
	if err != nil {
		return nil, "", fmt.Errorf("Error decoding file %s/%s/%s#%s : %w", org, repo, path, hash, err)
	}
	if n != int(content.Payload.Size) {
		return nil, "", fmt.Errorf("Decoded length doesn't match expected for %s/%s/%s#%s - %d vs %d bytes", org, repo, path, hash, content.Payload.Size, n)
	}

	return data, content.Payload.SHA, nil
}

// GetPullRequestFileContent fetches path from the pull request's head
// repository at the PR's head commit.
func (gitea *GiteaTransport) GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error) {
	return gitea.GetRepositoryFileContent(pr.Head.Repo.Owner.UserName, pr.Head.Repo.Name, pr.Head.Sha, path)
}
|
||||
|
||||
// GetRecentPullRequests lists pull requests of org/repo sorted by most recent
// update, stopping once a page ends with a PR that has not been touched for
// over a week (the sort guarantees everything after it is older).
func (gitea *GiteaTransport) GetRecentPullRequests(org, repo string) ([]*models.PullRequest, error) {
	prs := make([]*models.PullRequest, 0, 10)
	var page int64
	page = 1
	sort := "recentupdate"

	for {
		res, err := gitea.client.Repository.RepoListPullRequests(
			repository.NewRepoListPullRequestsParams().
				WithOwner(org).
				WithRepo(repo).
				WithPage(&page).
				WithSort(&sort),
			gitea.transport.DefaultAuthentication)
		if err != nil {
			return nil, err
		}

		prs = append(prs, res.Payload...)
		n := len(res.Payload)
		// NOTE(review): assumes a server page size of 10 — confirm against
		// the instance's default pagination limit.
		if n < 10 {
			break
		}

		// if pr is closed for more than a week, assume that we are done too
		if time.Since(time.Time(res.Payload[n-1].Updated)) > 7*24*time.Hour {
			break
		}

		page++
	}

	return prs, nil
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error) {
|
||||
not := false
|
||||
var page int64 = 1
|
||||
commits, err := gitea.client.Repository.RepoGetAllCommits(
|
||||
repository.NewRepoGetAllCommitsParams().
|
||||
WithOwner(org).
|
||||
WithRepo(repo).
|
||||
WithSha(&branch).
|
||||
WithPage(&page).
|
||||
WithStat(¬).
|
||||
WithFiles(¬).
|
||||
WithVerification(¬).
|
||||
WithLimit(&commitNo),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return commits.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetCurrentUser() (*models.User, error) {
|
||||
user, err := gitea.client.User.UserGetCurrent(
|
||||
user.NewUserGetCurrentParams(),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return user.GetPayload(), nil
|
||||
}
|
||||
@@ -1,317 +0,0 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
rabbitmq "github.com/rabbitmq/amqp091-go"
|
||||
)
|
||||
|
||||
// Gitea webhook event types as they appear in the AMQP routing key
// (<scope>.src.<org>.<type>.#).
const RequestType_CreateBrachTag = "create"
const RequestType_DeleteBranchTag = "delete"
const RequestType_Fork = "fork"
const RequestType_Issue = "issues"
const RequestType_IssueAssign = "issue_assign"
const RequestType_IssueComment = "issue_comment"
const RequestType_IssueLabel = "issue_label"
const RequestType_IssueMilestone = "issue_milestone"
const RequestType_Push = "push"
const RequestType_Repository = "repository"
const RequestType_Release = "release"
const RequestType_PR = "pull_request"
const RequestType_PRAssign = "pull_request_assign"
const RequestType_PRLabel = "pull_request_label"
const RequestType_PRComment = "pull_request_comment"
const RequestType_PRMilestone = "pull_request_milestone"
const RequestType_PRSync = "pull_request_sync"
const RequestType_PRReviewAccepted = "pull_request_review_approved"
const RequestType_PRReviewRejected = "pull_request_review_rejected"
const RequestType_PRReviewRequest = "pull_request_review_request"
const RequestType_PRReviewComment = "pull_request_review_comment"
const RequestType_Wiki = "wiki"

// RequestProcessor handles one parsed event from the message bus.
type RequestProcessor interface {
	ProcessFunc(*Request) error
}

// ListenDefinitions configures which orgs and event types are consumed from
// the RabbitMQ bus and who handles them.
type ListenDefinitions struct {
	RabbitURL *url.URL // amqps://user:password@host/queue

	// GitAuthor is the identity used for commits made by handlers.
	GitAuthor string
	// Handlers maps a RequestType_* value to its processor.
	Handlers map[string]RequestProcessor
	// Orgs lists the Gitea organizations whose events are accepted.
	Orgs []string

	// topics is the sorted list of currently subscribed routing-key patterns.
	topics []string
	topicSubChanges chan string // +topic = subscribe, -topic = unsubscribe
}

// RabbitMessage is a raw delivery received from the bus.
type RabbitMessage rabbitmq.Delivery
|
||||
|
||||
func (l *ListenDefinitions) processTopicChanges(ch *rabbitmq.Channel, queueName string) {
|
||||
for {
|
||||
topic, ok := <-l.topicSubChanges
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
log.Println(" topic change:", topic)
|
||||
switch topic[0] {
|
||||
case '+':
|
||||
if err := ch.QueueBind(queueName, topic[1:], "pubsub", false, nil); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
case '-':
|
||||
if err := ch.QueueUnbind(queueName, topic[1:], "pubsub", nil); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
default:
|
||||
log.Println("Ignoring topic change.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (l *ListenDefinitions) processRabbitMQ(msgCh chan<- RabbitMessage) error {
|
||||
queueName := l.RabbitURL.Path
|
||||
l.RabbitURL.Path = ""
|
||||
|
||||
if len(queueName) > 0 && queueName[0] == '/' {
|
||||
queueName = queueName[1:]
|
||||
}
|
||||
|
||||
connection, err := rabbitmq.DialTLS(l.RabbitURL.String(), &tls.Config{
|
||||
ServerName: l.RabbitURL.Hostname(),
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot connect to %s . Err: %w", l.RabbitURL.Hostname(), err)
|
||||
}
|
||||
defer connection.Close()
|
||||
|
||||
ch, err := connection.Channel()
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot create a channel. Err: %w", err)
|
||||
}
|
||||
defer ch.Close()
|
||||
|
||||
if err = ch.ExchangeDeclarePassive("pubsub", "topic", true, false, false, false, nil); err != nil {
|
||||
return fmt.Errorf("Cannot find pubsub exchange? Err: %w", err)
|
||||
}
|
||||
|
||||
var q rabbitmq.Queue
|
||||
if len(queueName) == 0 {
|
||||
q, err = ch.QueueDeclare("", false, true, true, false, nil)
|
||||
} else {
|
||||
q, err = ch.QueueDeclarePassive(queueName, true, false, true, false, nil)
|
||||
if err != nil {
|
||||
log.Printf("queue not found .. trying to create it: %v\n", err)
|
||||
if ch.IsClosed() {
|
||||
ch, err = connection.Channel()
|
||||
if err != nil {
|
||||
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
|
||||
}
|
||||
}
|
||||
q, err = ch.QueueDeclare(queueName, true, false, true, false, nil)
|
||||
|
||||
if err != nil {
|
||||
log.Printf("can't create persistent queue ... falling back to temporaty queue: %v\n", err)
|
||||
if ch.IsClosed() {
|
||||
ch, err = connection.Channel()
|
||||
return fmt.Errorf("Channel cannot be re-opened. Err: %w", err)
|
||||
}
|
||||
q, err = ch.QueueDeclare("", false, true, true, false, nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot declare queue. Err: %w", err)
|
||||
}
|
||||
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
|
||||
|
||||
log.Println(" -- listening to topics:")
|
||||
l.topicSubChanges = make(chan string)
|
||||
defer close(l.topicSubChanges)
|
||||
go l.processTopicChanges(ch, q.Name)
|
||||
|
||||
for _, topic := range l.topics {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
}
|
||||
|
||||
msgs, err := ch.Consume(q.Name, "", true, true, false, false, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Cannot start consumer. Err: %w", err)
|
||||
}
|
||||
// log.Printf("queue: %s:%d", q.Name, q.Consumers)
|
||||
|
||||
for {
|
||||
msg, ok := <-msgs
|
||||
if !ok {
|
||||
return fmt.Errorf("channel/connection closed?\n")
|
||||
}
|
||||
|
||||
msgCh <- RabbitMessage(msg)
|
||||
}
|
||||
}
|
||||
|
||||
// connectAndProcessRabbitMQ keeps the consumer alive indefinitely: it retries
// processRabbitMQ with a short delay on error, and if the worker panics it
// recovers, waits, and respawns itself as a fresh goroutine.
func (l *ListenDefinitions) connectAndProcessRabbitMQ(log *log.Logger, ch chan<- RabbitMessage) {
	defer func() {
		if r := recover(); r != nil {
			log.Println(r)
			log.Println("'crash' RabbitMQ worker. Recovering... reconnecting...")
			time.Sleep(5 * time.Second)
			// Restart in a new goroutine; the panicking one unwinds and exits.
			go l.connectAndProcessRabbitMQ(log, ch)
		}
	}()

	for {
		err := l.processRabbitMQ(ch)
		if err != nil {
			log.Printf("Error in RabbitMQ connection. %#v", err)
			log.Println("Reconnecting in 2 seconds...")
			time.Sleep(2 * time.Second)
		}
	}
}
|
||||
|
||||
func (l *ListenDefinitions) connectToRabbitMQ(log *log.Logger) chan RabbitMessage {
|
||||
ch := make(chan RabbitMessage, 100)
|
||||
go l.connectAndProcessRabbitMQ(log, ch)
|
||||
|
||||
return ch
|
||||
}
|
||||
|
||||
func ProcessEvent(f RequestProcessor, request *Request) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Println("panic caught")
|
||||
if err, ok := r.(error); !ok {
|
||||
log.Println(err)
|
||||
}
|
||||
log.Println(string(debug.Stack()))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := f.ProcessFunc(request); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (l *ListenDefinitions) generateTopics() []string {
|
||||
topics := make([]string, 0, len(l.Handlers)*len(l.Orgs))
|
||||
scope := "suse"
|
||||
if l.RabbitURL.Hostname() == "rabbit.opensuse.org" {
|
||||
scope = "opensuse"
|
||||
}
|
||||
|
||||
for _, org := range l.Orgs {
|
||||
for requestType, _ := range l.Handlers {
|
||||
topics = append(topics, fmt.Sprintf("%s.src.%s.%s.#", scope, org, requestType))
|
||||
}
|
||||
}
|
||||
|
||||
slices.Sort(topics)
|
||||
return slices.Compact(topics)
|
||||
}
|
||||
|
||||
func (l *ListenDefinitions) UpdateTopics() {
|
||||
newTopics := l.generateTopics()
|
||||
|
||||
j := 0
|
||||
next_new_topic:
|
||||
for i := 0; i < len(newTopics); i++ {
|
||||
topic := newTopics[i]
|
||||
|
||||
for j < len(l.topics) {
|
||||
cmp := strings.Compare(topic, l.topics[j])
|
||||
|
||||
if cmp == 0 {
|
||||
j++
|
||||
continue next_new_topic
|
||||
}
|
||||
|
||||
if cmp < 0 {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
break
|
||||
}
|
||||
|
||||
l.topicSubChanges <- "-" + l.topics[j]
|
||||
j++
|
||||
}
|
||||
|
||||
if j == len(l.topics) {
|
||||
l.topicSubChanges <- "+" + topic
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ProcessRabbitMQEvents is the main event loop: it connects to RabbitMQ
// and dispatches each incoming message to the handler registered for its
// request type. Returns nil when the message channel is closed.
func (l *ListenDefinitions) ProcessRabbitMQEvents() error {
	log.Println("RabbitMQ connection:", l.RabbitURL.String())
	log.Println(len(l.Handlers), len(l.Orgs))

	// credentials come from package-level rabbitUser/rabbitPassword
	l.RabbitURL.User = url.UserPassword(rabbitUser, rabbitPassword)
	l.topics = l.generateTopics()
	ch := l.connectToRabbitMQ(log.Default())

	for {
		msg, ok := <-ch
		if !ok {
			// worker channel closed: orderly shutdown
			return nil
		}

		log.Println("event:", msg.RoutingKey)

		// routing key layout: <scope>.src.<org>.<type>... (see generateTopics)
		route := strings.Split(msg.RoutingKey, ".")
		if len(route) > 3 {
			reqType := route[3]
			org := route[2]

			if !slices.Contains(l.Orgs, org) {
				log.Println("Got event for unhandeled org:", org)
				continue
			}

			log.Println("org:", org, "type:", reqType)
			if handler, found := l.Handlers[reqType]; found {
				/* h, err := CreateRequestHandler()
				if err != nil {
					log.Println("Cannot create request handler", err)
					continue
				}
				*/
				req, err := ParseRequestJSON(reqType, msg.Body)
				if err != nil {
					// malformed payload: log and keep consuming
					log.Println("Error parsing request JSON:", err)
					continue
				} else {
					log.Println("processing req", req.Type)
					// h.Request = req
					ProcessEvent(handler, req)
				}
			}
		}
	}
}
|
||||
@@ -1,48 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestListenDefinitionsTopicUpdate checks that UpdateTopics emits the
// expected number of subscription-change messages.
//
// NOTE(review): the test table declares orgs2 and topicDelta entries, but
// orgs2 is never assigned to the ListenDefinitions before or after
// UpdateTopics -- the "adding one org" case appears incomplete; confirm
// intended flow (likely: run UpdateTopics with orgs1, swap to orgs2,
// run again, then compare deltas).
func TestListenDefinitionsTopicUpdate(t *testing.T) {

	tests := []struct {
		name         string
		handlers     []string
		orgs1, orgs2 []string

		// expected "+topic"/"-topic" messages after the update
		topicDelta []string
	}{
		{
			name: "no handlers, no orgs",
		},
		{
			name:       "adding one org",
			handlers:   []string{"foo"},
			orgs2:      []string{"newOrg"},
			topicDelta: []string{"+suse"},
		},
	}

	u, _ := url.Parse("amqps://rabbit.example.com")
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			l := ListenDefinitions{
				Orgs:            test.orgs1,
				Handlers:        make(map[string]RequestProcessor),
				topicSubChanges: make(chan string, len(test.topicDelta)*10),
				RabbitURL:       u,
			}

			// handlers are keyed by request type; nil processors suffice here
			for _, r := range test.handlers {
				l.Handlers[r] = nil
			}

			l.UpdateTopics()
			// only the count of emitted changes is asserted, not their content
			if len(l.topicSubChanges) != len(test.topicDelta) {
				t.Fatal("topicSubChanges != topicDelta")
			}
		})
	}
}
|
||||
@@ -1,200 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"slices"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=maintainership.go -destination=mock/maintainership.go -typed

// MaintainershipData answers who maintains a project or package, and
// whether a set of pull-request reviews constitutes maintainer approval.
type MaintainershipData interface {
	// ListProjectMaintainers returns the project-wide maintainer usernames.
	ListProjectMaintainers() []string
	// ListPackageMaintainers returns the maintainers responsible for pkg.
	ListPackageMaintainers(pkg string) []string

	// IsApproved reports whether reviews satisfy maintainer approval for pkg.
	IsApproved(pkg string, reviews []*models.PullReview) bool
}
|
||||
|
||||
// ProjectKey is the map key under which project-wide maintainers are stored.
const ProjectKey = ""

// ProjectFileKey is the file name that holds project-level maintainership
// data when maintainership is stored in directory layout.
const ProjectFileKey = "_project"
|
||||
|
||||
// MaintainershipMap is the parsed maintainership database, mapping a
// package name (or ProjectKey for project-wide) to maintainer usernames.
// When IsDir is true, per-package data lives in separate files and is
// fetched lazily through FetchPackage.
type MaintainershipMap struct {
	Data         map[string][]string
	IsDir        bool
	FetchPackage func(string) ([]byte, error)
}
|
||||
|
||||
func parseMaintainershipData(data []byte) (*MaintainershipMap, error) {
|
||||
maintainers := &MaintainershipMap{
|
||||
Data: make(map[string][]string),
|
||||
}
|
||||
if err := json.Unmarshal(data, &maintainers.Data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return maintainers, nil
|
||||
}
|
||||
|
||||
// FetchProjectMaintainershipData loads maintainership data for a project.
// It prefers the directory layout (a _project file per FetchMaintainershipDirFile)
// and falls back to a single maintainership file; a missing file yields an
// empty data set rather than an error.
func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit, branch string) (*MaintainershipMap, error) {
	data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, ProjectFileKey)
	dir := true
	if err != nil || data == nil {
		dir = false
		// NOTE(review): when err is nil but data is nil, this asserts on a
		// nil error, notFound is false, and we return (nil, nil) -- confirm
		// that callers expect a nil map with nil error in that case.
		if _, notFound := err.(*repository.RepoGetRawFileNotFound); !notFound {
			return nil, err
		}
		data, _, err = gitea.FetchMaintainershipFile(org, prjGit, branch)
		if err != nil || data == nil {
			if _, notFound := err.(*repository.RepoGetRawFileNotFound); !notFound {
				return nil, err
			}

			// no maintainers
			data = []byte("{}")
		}
	}

	m, err := parseMaintainershipData(data)
	if m != nil {
		m.IsDir = dir
		// lazy per-package fetcher used by the directory layout
		m.FetchPackage = func(pkg string) ([]byte, error) {
			data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, pkg)
			return data, err
		}
	}
	return m, err
}
|
||||
|
||||
func (data *MaintainershipMap) ListProjectMaintainers() []string {
|
||||
if data == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
m, found := data.Data[ProjectKey]
|
||||
if !found {
|
||||
return nil
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
// parsePkgDirData extracts the maintainer list for pkg out of a raw JSON
// maintainership document. Returns nil on malformed JSON or when the
// package has no entry.
func parsePkgDirData(pkg string, data []byte) []string {
	var parsed map[string][]string
	if err := json.Unmarshal(data, &parsed); err != nil {
		return nil
	}

	if maintainers, ok := parsed[pkg]; ok {
		return maintainers
	}
	return nil
}
|
||||
|
||||
func (data *MaintainershipMap) ListPackageMaintainers(pkg string) []string {
|
||||
if data == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
pkgMaintainers, found := data.Data[pkg]
|
||||
if !found && data.IsDir {
|
||||
pkgData, err := data.FetchPackage(pkg)
|
||||
if err == nil {
|
||||
pkgMaintainers = parsePkgDirData(pkg, pkgData)
|
||||
if len(pkgMaintainers) > 0 {
|
||||
data.Data[pkg] = pkgMaintainers
|
||||
}
|
||||
}
|
||||
}
|
||||
prjMaintainers := data.ListProjectMaintainers()
|
||||
|
||||
prjMaintainer:
|
||||
for _, prjm := range prjMaintainers {
|
||||
for i := range pkgMaintainers {
|
||||
if pkgMaintainers[i] == prjm {
|
||||
continue prjMaintainer
|
||||
}
|
||||
}
|
||||
pkgMaintainers = append(pkgMaintainers, prjm)
|
||||
}
|
||||
|
||||
return pkgMaintainers
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullReview) bool {
|
||||
reviewers, found := data.Data[pkg]
|
||||
if !found {
|
||||
if pkg != ProjectKey && data.IsDir {
|
||||
r, err := data.FetchPackage(pkg)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
reviewers = parsePkgDirData(pkg, r)
|
||||
data.Data[pkg] = reviewers
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
if len(reviewers) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, review := range reviews {
|
||||
if !review.Stale && review.State == ReviewStateApproved && slices.Contains(reviewers, review.User.UserName) {
|
||||
return true
|
||||
}
|
||||
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) WriteMaintainershipFile(writer io.StringWriter) error {
|
||||
if data.IsDir {
|
||||
return fmt.Errorf("Not implemented")
|
||||
}
|
||||
|
||||
writer.WriteString("{\n")
|
||||
|
||||
if d, ok := data.Data[""]; ok {
|
||||
eol := ","
|
||||
if len(data.Data) == 1 {
|
||||
eol = ""
|
||||
}
|
||||
slices.Sort(d)
|
||||
str, _ := json.Marshal(d)
|
||||
writer.WriteString(fmt.Sprintf(" \"\": %s%s\n", string(str), eol))
|
||||
}
|
||||
|
||||
keys := make([]string, len(data.Data))
|
||||
i := 0
|
||||
for pkg := range data.Data {
|
||||
if pkg == "" {
|
||||
continue
|
||||
}
|
||||
keys[i] = pkg
|
||||
i++
|
||||
}
|
||||
if len(keys) >= i {
|
||||
keys = slices.Delete(keys, i, len(keys))
|
||||
}
|
||||
slices.Sort(keys)
|
||||
for i, pkg := range(keys) {
|
||||
eol := ","
|
||||
if i == len(keys)-1 {
|
||||
eol = ""
|
||||
}
|
||||
maintainers := data.Data[pkg]
|
||||
slices.Sort(maintainers)
|
||||
pkgStr, _ := json.Marshal(pkg)
|
||||
maintainersStr, _ := json.Marshal(maintainers)
|
||||
writer.WriteString(fmt.Sprintf(" %s: %s%s\n", pkgStr, maintainersStr, eol))
|
||||
}
|
||||
|
||||
writer.WriteString("}\n")
|
||||
return nil
|
||||
}
|
||||
@@ -1,333 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
// PRInfo pairs a pull request with its fetched review state.
type PRInfo struct {
	PR      *models.PullRequest
	Reviews *PRReviews
}
|
||||
|
||||
// PRSet is the group of pull requests reviewed and merged as a unit:
// the PrjGit PR plus the package PRs it references.
type PRSet struct {
	PRs    []PRInfo
	Config *AutogitConfig
}
|
||||
|
||||
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []PRInfo, config *AutogitConfig) ([]PRInfo, error) {
|
||||
for _, p := range currentSet {
|
||||
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
retSet := []PRInfo{PRInfo{PR: pr}}
|
||||
|
||||
// only need to extact there on PrjGit PR
|
||||
if pr.Base.Repo.Name == config.GitProjectName && pr.Base.Repo.Owner.UserName == config.Organization {
|
||||
_, refPRs := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
|
||||
for _, prdata := range refPRs {
|
||||
pr, err := gitea.GetPullRequest(prdata.Org, prdata.Repo, prdata.Num)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data, err := readPRData(gitea, pr, slices.Concat(currentSet, retSet), config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
retSet = slices.Concat(retSet, data)
|
||||
}
|
||||
}
|
||||
|
||||
return retSet, nil
|
||||
}
|
||||
|
||||
func FetchPRSet(gitea GiteaPRFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
|
||||
var pr *models.PullRequest
|
||||
var err error
|
||||
|
||||
if org != config.Organization || repo != config.GitProjectName {
|
||||
if pr, err = gitea.GetAssociatedPrjGitPR(config.Organization, config.GitProjectName, org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if pr == nil {
|
||||
if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
prs, err := readPRData(gitea, pr, nil, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &PRSet{PRs: prs, Config: config}, nil
|
||||
}
|
||||
|
||||
func (rs *PRSet) IsPrjGitPR(pr *models.PullRequest) bool {
|
||||
return pr.Base.Repo.Name == rs.Config.GitProjectName && pr.Base.Repo.Owner.UserName == rs.Config.Organization
|
||||
}
|
||||
|
||||
func (rs *PRSet) GetPrjGitPR() (*models.PullRequest, error) {
|
||||
var ret *models.PullRequest
|
||||
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
if ret == nil {
|
||||
ret = prinfo.PR
|
||||
} else {
|
||||
return nil, errors.New("Multiple PrjGit PRs in one review set")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ret != nil {
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
return nil, errors.New("No PrjGit PR found")
|
||||
}
|
||||
|
||||
// IsConsistent verifies the set is closed: the PrjGit PR's description
// must reference exactly the other PRs in the set (a 1:many mapping).
func (rs *PRSet) IsConsistent() bool {
	prjpr, err := rs.GetPrjGitPR()
	if err != nil {
		return false
	}
	// references parsed out of the PrjGit PR's description body
	_, prjpr_set := ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(prjpr.Body)))
	if len(prjpr_set) != len(rs.PRs)-1 { // 1 to many mapping
		return false
	}

next_rs:
	for _, prinfo := range rs.PRs {
		if prjpr == prinfo.PR {
			// the PrjGit PR itself is not expected in its own reference list
			continue
		}

		// every package PR must appear among the description's references
		for _, pr := range prjpr_set {
			if prinfo.PR.Base.Repo.Owner.UserName == pr.Org && prinfo.PR.Base.Repo.Name == pr.Repo && prinfo.PR.Index == pr.Num {
				continue next_rs
			}
		}
		return false
	}
	return true
}
|
||||
|
||||
// AssignReviewers requests reviews on every PR of the set: configured
// project reviewers (plus project maintainers when the PrjGit PR stands
// alone) on the PrjGit PR, and configured package reviewers plus project
// and package maintainers on package PRs. The submitter and anyone with a
// pending or completed review is excluded before requesting.
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintainers MaintainershipData) error {
	configReviewers := ParseReviewers(rs.Config.Reviewers)

	for _, pr := range rs.PRs {
		reviewers := []string{}
		if rs.IsPrjGitPR(pr.PR) {
			reviewers = configReviewers.Prj
			if len(rs.PRs) == 1 {
				// standalone PrjGit PR: project maintainers review it directly
				reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers())
			}
		} else {
			pkg := pr.PR.Base.Repo.Name
			reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(), maintainers.ListPackageMaintainers(pkg))
		}

		// submitters do not need to review their own work
		if idx := slices.Index(reviewers, pr.PR.User.UserName); idx != -1 {
			reviewers = slices.Delete(reviewers, idx, idx+1)
		}

		// remove reviewers that were already requested and are not stale
		reviews, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
		if err != nil {
			return err
		}

		// in-place filter: drop users whose review is pending or done
		for idx := 0; idx < len(reviewers); {
			user := reviewers[idx]
			if reviews.HasPendingReviewBy(user) || reviews.IsReviewedBy(user) {
				reviewers = slices.Delete(reviewers, idx, idx+1)
			} else {
				idx++
			}
		}

		// get maintainers associated with the PR too
		if len(reviewers) > 0 {
			if _, err := gitea.RequestReviews(pr.PR, reviewers...); err != nil {
				return fmt.Errorf("Cannot create reviews on %s/%s#%d for [%s]: %w", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", "), err)
			}
		}
	}
	return nil
}
|
||||
|
||||
// IsApproved reports whether every PR in the set is approved by both the
// configured Gitea reviewers and the relevant maintainers.
// NOTE: an empty set returns false (is_reviewed never becomes true).
func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData) bool {
	configReviewers := ParseReviewers(rs.Config.Reviewers)

	is_reviewed := false
	for _, pr := range rs.PRs {
		var reviewers []string
		var pkg string
		if rs.IsPrjGitPR(pr.PR) {
			reviewers = configReviewers.Prj
			pkg = "" // ProjectKey: project-level maintainership applies
		} else {
			reviewers = configReviewers.Pkg
			pkg = pr.PR.Base.Repo.Name
		}

		r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
		if err != nil {
			return false
		}
		is_reviewed = r.IsApproved()
		if !is_reviewed {
			return false
		}

		// maintainer approval is required in addition to reviewer approval
		if is_reviewed = maintainers.IsApproved(pkg, r.reviews); !is_reviewed {
			return false
		}
	}
	return is_reviewed
}
|
||||
|
||||
func (rs *PRSet) Merge(author, email string) error {
|
||||
prjgit, err := rs.GetPrjGitPR()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gh := GitHandlerGeneratorImpl{}
|
||||
git, err := gh.CreateGitHandler(author, email, prjgit.Base.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
git.GitExecOrPanic("", "clone", "--depth", "1", prjgit.Base.Repo.SSHURL, DefaultGitPrj)
|
||||
git.GitExecOrPanic(DefaultGitPrj, "fetch", "origin", prjgit.Base.Sha, prjgit.Head.Sha)
|
||||
|
||||
// if other changes merged, check if we have conflicts
|
||||
rev := strings.TrimSpace(git.GitExecWithOutputOrPanic(DefaultGitPrj, "merge-base", "HEAD", prjgit.Base.Sha, prjgit.Head.Sha))
|
||||
if rev != prjgit.Base.Sha {
|
||||
return fmt.Errorf("Base.Sha (%s) not yet merged into project-git. Aborting merge.", prjgit.Base.Sha)
|
||||
}
|
||||
/*
|
||||
rev := git.GitExecWithOutputOrPanic(common.DefaultGitPrj, "rev-list", "-1", "HEAD")
|
||||
if rev != prjgit.Base.Sha {
|
||||
panic("FIXME")
|
||||
}
|
||||
*/
|
||||
msg := "merging"
|
||||
|
||||
err = git.GitExec(DefaultGitPrj, "merge", "--no-ff", "-m", msg, prjgit.Head.Sha)
|
||||
if err != nil {
|
||||
status, statusErr := git.GitStatus(DefaultGitPrj)
|
||||
if statusErr != nil {
|
||||
return fmt.Errorf("Failed to merge: %w . Status also failed: %w", err, statusErr)
|
||||
}
|
||||
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged {
|
||||
if s.Path != ".gitmodules" {
|
||||
return err
|
||||
}
|
||||
|
||||
submodules, err := git.GitSubmoduleList(DefaultGitPrj, "MERGE_HEAD")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s3, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[2])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
|
||||
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
|
||||
// merge from subs3 (target), subs1 (orig), subs2 (2-nd base that is missing from target base)
|
||||
// this will update submodules
|
||||
mergedSubs := slices.Concat(subs1, subs2, subs3)
|
||||
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
|
||||
nextSub:
|
||||
for subName := range submodules {
|
||||
|
||||
for i := range mergedSubs {
|
||||
if path.Base(mergedSubs[i].Path) == subName {
|
||||
filteredSubs = append(filteredSubs, mergedSubs[i])
|
||||
continue nextSub
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("Cannot find submodule for path: %s", subName)
|
||||
}
|
||||
|
||||
out, err := os.Create(path.Join(git.GetPath(), DefaultGitPrj, ".gitmodules"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
|
||||
}
|
||||
if err = WriteSubmodules(filteredSubs, out); err != nil {
|
||||
return fmt.Errorf("Can't write .gitmodules: %w", err)
|
||||
}
|
||||
if out.Close(); err != nil {
|
||||
return fmt.Errorf("Can't close .gitmodules: %w", err)
|
||||
}
|
||||
|
||||
os.CopyFS("/tmp/test", os.DirFS(git.GetPath()))
|
||||
|
||||
git.GitExecOrPanic(DefaultGitPrj, "add", ".gitmodules")
|
||||
git.GitExecOrPanic(DefaultGitPrj, "-c", "core.editor=true", "merge", "--continue")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FF all non-prj git
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
continue
|
||||
}
|
||||
git.GitExecOrPanic("", "clone", prinfo.PR.Base.Repo.SSHURL, prinfo.PR.Base.Name)
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "fetch", "origin", prinfo.PR.Head.Sha)
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "merge", "--ff", prinfo.PR.Head.Sha)
|
||||
}
|
||||
|
||||
// push changes
|
||||
git.GitExecOrPanic(DefaultGitPrj, "push", "origin")
|
||||
for _, prinfo := range rs.PRs {
|
||||
if rs.IsPrjGitPR(prinfo.PR) {
|
||||
continue
|
||||
}
|
||||
git.GitExecOrPanic(prinfo.PR.Base.Name, "push", "origin")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,554 +0,0 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
// TestPR covers PRSet construction (FetchPRSet), PrjGit-PR discovery,
// consistency checking, and approval aggregation against mocked Gitea
// fetchers.
func TestPR(t *testing.T) {
	baseConfig := common.AutogitConfig{
		Reviewers:      []string{"+super1", "*super2", "m1", "-m2"},
		Branch:         "branch",
		Organization:   "foo",
		GitProjectName: "barPrj",
	}

	// prdata describes one mocked PR and the reviews/errors to return for it.
	type prdata struct {
		pr           *models.PullRequest
		pr_err       error
		reviews      []*models.PullReview
		review_error error
	}

	tests := []struct {
		name      string
		data      []prdata
		api_error string

		resLen        int  // expected number of PRs in the fetched set
		reviewed      bool // expected IsApproved result
		consistentSet bool // expected IsConsistent result
		prjGitPRIndex int  // index into data of the PrjGit PR; -1 = none

		// optional override for how the PRSet is fetched (e.g. directly
		// from the PrjGit PR instead of a package PR)
		reviewSetFetcher func(*mock_common.MockGiteaPRFetcher) (*common.PRSet, error)
	}{
		{
			name: "Error fetching PullRequest",
			data: []prdata{
				{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("Missing PR")},
			},
			prjGitPRIndex: -1,
		},
		{
			name: "Error fetching PullRequest in PrjGit",
			data: []prdata{
				{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}, pr_err: errors.New("missing PR")},
				{pr: &models.PullRequest{Body: "", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
			},
		},
		{
			name: "Error fetching prjgit",
			data: []prdata{
				{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
			},
			resLen:        1,
			prjGitPRIndex: -1,
		},
		{
			name: "Review set is consistent",
			data: []prdata{
				{pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
				{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
			},
			resLen:        2,
			prjGitPRIndex: 1,
			consistentSet: true,
		},

		{
			name: "Review set is consistent: 1pkg",
			data: []prdata{
				{pr: &models.PullRequest{Body: "PR: foo/barPrj#22", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
				{pr: &models.PullRequest{Body: "PR: test/repo#42", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
			},
			resLen:        2,
			prjGitPRIndex: 1,
			consistentSet: true,
		},
		{
			name: "Review set is consistent: 2pkg",
			data: []prdata{
				{pr: &models.PullRequest{Body: "some desc", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
				{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
				{pr: &models.PullRequest{Body: "some other desc\nPR: foo/fer#33", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
			},
			resLen:        3,
			prjGitPRIndex: 1,
			consistentSet: true,
		},
		{
			name: "Review set of prjgit PR is consistent",
			data: []prdata{
				{
					pr: &models.PullRequest{Body: "", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}},
					reviews: []*models.PullReview{
						{Body: "LGTM", User: &models.User{UserName: "m2"}, State: common.ReviewStateApproved},
						{Body: "LGTM", User: &models.User{UserName: "super2"}, State: common.ReviewStateApproved},
						{Body: "LGTM", User: &models.User{UserName: common.Bot_BuildReview}, State: common.ReviewStateApproved},
					},
				},
			},
			resLen:        1,
			prjGitPRIndex: 0,
			consistentSet: true,
			reviewed:      true,
			// fetch directly via the PrjGit PR, not via a package PR
			reviewSetFetcher: func(mock *mock_common.MockGiteaPRFetcher) (*common.PRSet, error) {
				return common.FetchPRSet(mock, "foo", "barPrj", 42, &baseConfig)
			},
		},
		{
			name: "Review set is consistent: 2pkg",
			data: []prdata{
				{pr: &models.PullRequest{Body: "PR: foo/barPrj#222", Index: 42, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "test"}}}}},
				{pr: &models.PullRequest{Body: "PR: test/repo2#41", Index: 20, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
				{pr: &models.PullRequest{Body: "PR: test/repo#42\nPR: test/repo2#41", Index: 22, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "barPrj", Owner: &models.User{UserName: "foo"}}}}},
				{pr: &models.PullRequest{Body: "PR: foo/barPrj#20", Index: 41, Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo2", Owner: &models.User{UserName: "test"}}}}},
			},
			resLen:        3,
			prjGitPRIndex: 2,
			consistentSet: true,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			ctl := gomock.NewController(t)
			pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
			review_mock := mock_common.NewMockGiteaPRChecker(ctl)
			// reviewer_mock := mock_common.NewMockGiteaReviewRequester(ctl)

			if test.reviewSetFetcher == nil { // if we are fetching the prjgit directly, the these mocks are not called
				if test.prjGitPRIndex >= 0 {
					pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
						Return(test.data[test.prjGitPRIndex].pr, test.data[test.prjGitPRIndex].pr_err)
				} else if test.prjGitPRIndex < 0 {
					// no prjgit PR
					pr_mock.EXPECT().GetAssociatedPrjGitPR(baseConfig.Organization, baseConfig.GitProjectName, test.data[0].pr.Base.Repo.Owner.UserName, test.data[0].pr.Base.Repo.Name, test.data[0].pr.Index).
						Return(nil, nil)
				}
			}

			// wire up every PR and its reviews; remember the first injected error
			var test_err error
			for _, data := range test.data {
				pr_mock.EXPECT().GetPullRequest(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.pr, data.pr_err).AnyTimes()
				if data.pr_err != nil {
					test_err = data.pr_err
				}
				review_mock.EXPECT().GetPullRequestReviews(data.pr.Base.Repo.Owner.UserName, data.pr.Base.Repo.Name, data.pr.Index).Return(data.reviews, data.review_error).AnyTimes()
			}

			var res *common.PRSet
			var err error

			if test.reviewSetFetcher != nil {
				res, err = test.reviewSetFetcher(pr_mock)
			} else {
				res, err = common.FetchPRSet(pr_mock, "test", "repo", 42, &baseConfig)
			}

			if err == nil {
				if test_err != nil {
					t.Fatal("Expected", test_err, "but got", err)
				}
			} else {
				if res != nil {
					t.Fatal("error but got ReviewSet?")
				}

				if test.api_error != "" {
					if err.Error() != test.api_error {
						t.Fatal("expected", test.api_error, "but got", err)
					}
				} else if test_err != err {
					t.Fatal("expected", test_err, "but got", err)
				}
				return
			}

			if test.resLen != len(res.PRs) {
				t.Error("expected result len", test.resLen, "but got", len(res.PRs))
			}

			PrjGitPR, err := res.GetPrjGitPR()
			if test.prjGitPRIndex < 0 {
				if err == nil {
					t.Error("expected error, but nothing")
				}
			}
			// verify the PrjGit PR was found at the expected table index
			pr_found := false
			if test.prjGitPRIndex >= 0 {
				for i := range test.data {
					if PrjGitPR == test.data[i].pr && i == test.prjGitPRIndex {
						t.Log("found at index", i)
						pr_found = true
					}
				}
				if !pr_found {
					t.Error("Cannot find expected PrjGit location in PR set", PrjGitPR)
				}
			} else {
				if PrjGitPR != nil {
					t.Log("Expected prjgit not found, but found?", PrjGitPR)
				}
			}

			if isConsistent := res.IsConsistent(); isConsistent != test.consistentSet {
				t.Error("IsConsistent() returned unexpected:", isConsistent)
			}
			/*
				if err := res.AssignReviewers(reviewer_mock); err != nil {
					t.Error("expected no errors assigning reviewers:", err)
				}
			*/

			// maintainers always approve here; only reviewer approval varies
			maintainers := mock_common.NewMockMaintainershipData(ctl)
			maintainers.EXPECT().IsApproved(gomock.Any(), gomock.Any()).Return(true).AnyTimes()

			if isApproved := res.IsApproved(review_mock, maintainers); isApproved != test.reviewed {
				t.Error("expected reviewed to be NOT", isApproved)
			}
		})
	}
}
|
||||
|
||||
func TestPRAssignReviewers(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
pkgReviews []*models.PullReview
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "No reviewers",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer and one pkg reviewer only",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"user2", "prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "Reviews are done",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{},
|
||||
},
|
||||
{
|
||||
name: "Stale review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer"}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "pkgrepo", Owner: &models.User{UserName: "other"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("other", "pkgrepo", int64(1)).Return(test.pkgReviews, nil)
|
||||
pr_mock.EXPECT().GetAssociatedPrjGitPR("org", "repo", "other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: fmt.Sprintf(common.PrPattern, "other", "pkgrepo", 1),
|
||||
User: &models.User{UserName: "bot1"},
|
||||
RequestedReviewers: []*models.User{{UserName: "main_reviewer"}},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 42,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo").Return([]string{"pkgmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet(pr_mock, "other", "pkgrepo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 2 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
r = test.expectedReviewerCall[1]
|
||||
}
|
||||
if len(r) > 0 {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
|
||||
prjgit_tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "PrjMaintainers in prjgit review when not part of pkg set",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "repo",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot", "prjmaintainer"}},
|
||||
},
|
||||
}
|
||||
for _, test := range prjgit_tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers().Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet(pr_mock, "org", "repo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 1 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
t.Fatal("only prjgit pr here")
|
||||
}
|
||||
if len(r) > 0 {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, r).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPRMerge(t *testing.T) {
|
||||
cwd, _ := os.Getwd()
|
||||
cmd := exec.Command("/usr/bin/bash", path.Join(cwd, "test_repo_setup.sh"))
|
||||
cmd.Dir = t.TempDir()
|
||||
if out, err := cmd.CombinedOutput(); err != nil {
|
||||
t.Fatal(string(out))
|
||||
}
|
||||
|
||||
common.ExtraGitParams = []string{
|
||||
"GIT_CONFIG_COUNT=1",
|
||||
"GIT_CONFIG_KEY_0=protocol.file.allow",
|
||||
"GIT_CONFIG_VALUE_0=always",
|
||||
|
||||
"GIT_AUTHOR_NAME=testname",
|
||||
"GIT_AUTHOR_EMAIL=test@suse.com",
|
||||
"GIT_AUTHOR_DATE='2005-04-07T22:13:13'",
|
||||
"GIT_COMMITTER_NAME=testname",
|
||||
"GIT_COMMITTER_EMAIL=test@suse.com",
|
||||
"GIT_COMMITTER_DATE='2005-04-07T22:13:13'",
|
||||
}
|
||||
|
||||
config := &common.AutogitConfig{
|
||||
Organization: "org",
|
||||
GitProjectName: "prj",
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
pr *models.PullRequest
|
||||
mergeError string
|
||||
}{
|
||||
{
|
||||
name: "Merge base not merged in main",
|
||||
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "e8b0de43d757c96a9d2c7101f4bff404e322f53a1fa4041fb85d646110c38ad4", // "base_add_b1"
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
UserName: "org",
|
||||
},
|
||||
SSHURL: path.Join(cmd.Dir, "prjgit"),
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
|
||||
},
|
||||
},
|
||||
mergeError: "Aborting merge",
|
||||
},
|
||||
{
|
||||
name: "Merge conflict in modules",
|
||||
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "4fbd1026b2d7462ebe9229a49100c11f1ad6555520a21ba515122d8bc41328a8",
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
UserName: "org",
|
||||
},
|
||||
SSHURL: path.Join(cmd.Dir, "prjgit"),
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "88584433de1c917c1d773f62b82381848d882491940b5e9b427a540aa9057d9a", // "base_add_b2"
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGiteaPRFetcher(ctl)
|
||||
|
||||
mock.EXPECT().GetPullRequest("org", "prj", int64(1)).Return(test.pr, nil)
|
||||
|
||||
set, err := common.FetchPRSet(mock, "org", "prj", 1, config)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err = set.Merge("test", "test@example.com"); err != nil && (test.mergeError == "" || (len(test.mergeError) > 0 && !strings.Contains(err.Error(), test.mergeError))) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
)
|
||||
|
||||
type Reviewers struct {
|
||||
Prj []string
|
||||
Pkg []string
|
||||
}
|
||||
|
||||
func ParseReviewers(input []string) *Reviewers {
|
||||
r := &Reviewers{}
|
||||
for _, reviewer := range input {
|
||||
switch reviewer[0] {
|
||||
case '*':
|
||||
r.Prj = append(r.Prj, reviewer[1:])
|
||||
r.Pkg = append(r.Pkg, reviewer[1:])
|
||||
case '-':
|
||||
r.Prj = append(r.Prj, reviewer[1:])
|
||||
case '+':
|
||||
r.Pkg = append(r.Pkg, reviewer[1:])
|
||||
default:
|
||||
r.Pkg = append(r.Pkg, reviewer)
|
||||
}
|
||||
}
|
||||
|
||||
if !slices.Contains(r.Prj, Bot_BuildReview) {
|
||||
r.Prj = append(r.Prj, Bot_BuildReview)
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestReviewers(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input []string
|
||||
|
||||
prj []string
|
||||
pkg []string
|
||||
}{
|
||||
{
|
||||
name: "project and package reviewers",
|
||||
input: []string{"1", "2", "3", "*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"5", "7", common.Bot_BuildReview},
|
||||
pkg: []string{"1", "2", "3", "5", "6"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
reviewers := common.ParseReviewers(test.input)
|
||||
if !slices.Equal(reviewers.Prj, test.prj) {
|
||||
t.Error("unexpected return of ForProject():", reviewers.Prj)
|
||||
}
|
||||
if !slices.Equal(reviewers.Pkg, test.pkg) {
|
||||
t.Error("unexpected return of ForProject():", reviewers.Pkg)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type PRReviews struct {
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
}
|
||||
|
||||
func FetchGiteaReviews(rf GiteaReviewFetcher, reviewers []string, org, repo string, no int64) (*PRReviews, error) {
|
||||
reviews, err := rf.GetPullRequestReviews(org, repo, no)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &PRReviews{
|
||||
reviews: reviews,
|
||||
reviewers: reviewers,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsApproved() bool {
|
||||
goodReview := false
|
||||
|
||||
for _, reviewer := range r.reviewers {
|
||||
goodReview = false
|
||||
for _, review := range r.reviews {
|
||||
if review.User.UserName == reviewer && review.State == ReviewStateApproved && !review.Stale {
|
||||
goodReview = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !goodReview {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return goodReview
|
||||
}
|
||||
|
||||
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
isPending := false
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved:
|
||||
fallthrough
|
||||
case ReviewStateRequestChanges:
|
||||
return false
|
||||
case ReviewStateRequestReview:
|
||||
fallthrough
|
||||
case ReviewStatePending:
|
||||
isPending = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return isPending
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedBy(reviewer string) bool {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved:
|
||||
return true
|
||||
case ReviewStateRequestChanges:
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
@@ -1,140 +0,0 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestReviews(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
fetchErr error
|
||||
isApproved bool
|
||||
isReviewedByTest1 bool
|
||||
isPendingByTest1 bool
|
||||
}{
|
||||
{
|
||||
name: "Reviews of unreviews PR",
|
||||
isApproved: false,
|
||||
},
|
||||
{
|
||||
name: "Single reviewer done",
|
||||
reviews: []*models.PullReview{&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}}},
|
||||
reviewers: []string{"user1"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one not approved",
|
||||
reviews: []*models.PullReview{&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}}},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one stale approved",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}, Stale: true},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one is pending",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one stale and pending",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: false,
|
||||
isReviewedByTest1: false,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer approved",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer approved, but fetch error",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
fetchErr: errors.New("System error fetching reviews."),
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
{
|
||||
name: "Extra reviewers are ignored",
|
||||
reviews: []*models.PullReview{
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user1"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user4"}},
|
||||
&models.PullReview{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: true,
|
||||
isReviewedByTest1: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
rf := mock_common.NewMockGiteaReviewFetcher(ctl)
|
||||
|
||||
rf.EXPECT().GetPullRequestReviews("test", "pr", int64(1)).Return(test.reviews, test.fetchErr)
|
||||
|
||||
reviews, err := common.FetchGiteaReviews(rf, test.reviewers, "test", "pr", 1)
|
||||
|
||||
if test.fetchErr != nil {
|
||||
if err != test.fetchErr {
|
||||
t.Fatal("FetchReviews() failed with unexpected error:", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if r := reviews.IsApproved(); r != test.isApproved {
|
||||
t.Fatal("Unexpected IsReviewed():", r, "vs. expected", test.isApproved)
|
||||
}
|
||||
|
||||
if r := reviews.HasPendingReviewBy("user1"); r != test.isPendingByTest1 {
|
||||
t.Fatal("Unexpected IsReviewPendingBy(user1):", r)
|
||||
}
|
||||
if r := reviews.IsReviewedBy("user1"); r != test.isReviewedByTest1 {
|
||||
t.Fatal("Unexpected IsReviewedBy(user1):", r)
|
||||
}
|
||||
|
||||
if r := reviews.HasPendingReviewBy("random"); r {
|
||||
t.Fatal("Unexpected IsReviewPendingBy(random):", r)
|
||||
}
|
||||
if r := reviews.IsReviewedBy("random"); r {
|
||||
t.Fatal("Unexpected IsReviewedBy(random):", r)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func SplitStringNoEmpty(str, sep string) []string {
|
||||
ret := slices.DeleteFunc(strings.Split(str, sep), func(s string) bool {
|
||||
return len(strings.TrimSpace(s)) == 0
|
||||
})
|
||||
for i := range ret {
|
||||
ret[i] = strings.TrimSpace(ret[i])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func TranslateHttpsToSshUrl(url string) (string, error) {
|
||||
const url1 = "https://src.opensuse.org/"
|
||||
const url2 = "https://src.suse.de/"
|
||||
|
||||
if len(url) > len(url1) && url[0:len(url1)] == url1 {
|
||||
return "gitea@src.opensuse.org:" + url[len(url1):], nil
|
||||
}
|
||||
if len(url) > len(url2) && url[0:len(url2)] == url2 {
|
||||
return "gitea@src.suse.de:" + url[len(url2):], nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("Unknown input url %s", url)
|
||||
}
|
||||
|
||||
17
common/Makefile
Normal file
17
common/Makefile
Normal file
@@ -0,0 +1,17 @@
|
||||
all: build
|
||||
|
||||
api.json::
|
||||
curl -o api.json https://src.opensuse.org/swagger.v1.json
|
||||
|
||||
gitea-generated/client/gitea_api_client.go: api.json
|
||||
[ -d gitea-generated ] || mkdir gitea-generated
|
||||
podman run --rm -v $$(pwd)/..:/api ghcr.io/go-swagger/go-swagger generate client -f /api/common/api.json -t /api/common/gitea-generated
|
||||
|
||||
swagger: gitea-generated/client/gitea_api_client.go
|
||||
|
||||
api:
|
||||
go generate
|
||||
|
||||
build: api
|
||||
go build
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -11,14 +11,14 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const PrPattern = "PR: %s/%s#%d"
|
||||
const PrPattern = "PR: %s/%s!%d"
|
||||
|
||||
type BasicPR struct {
|
||||
Org, Repo string
|
||||
Num int64
|
||||
}
|
||||
|
||||
var validOrgAndRepoRx *regexp.Regexp = regexp.MustCompile("^[A-Za-z0-9_-]+$")
|
||||
var validOrgAndRepoRx *regexp.Regexp = regexp.MustCompile("^[A-Za-z0-9_\\.-]+$")
|
||||
|
||||
func parsePrLine(line string) (BasicPR, error) {
|
||||
var ret BasicPR
|
||||
@@ -36,10 +36,14 @@ func parsePrLine(line string) (BasicPR, error) {
|
||||
return ret, errors.New("missing / separator")
|
||||
}
|
||||
|
||||
repo := strings.SplitN(org[1], "#", 2)
|
||||
repo := strings.SplitN(org[1], "!", 2)
|
||||
ret.Repo = repo[0]
|
||||
if len(repo) != 2 {
|
||||
return ret, errors.New("Missing # separator")
|
||||
repo = strings.SplitN(org[1], "#", 2)
|
||||
ret.Repo = repo[0]
|
||||
}
|
||||
if len(repo) != 2 {
|
||||
return ret, errors.New("Missing ! or # separator")
|
||||
}
|
||||
|
||||
// Gitea requires that each org and repo be [A-Za-z0-9_-]+
|
||||
@@ -14,6 +14,7 @@ func newStringScanner(s string) *bufio.Scanner {
|
||||
}
|
||||
|
||||
func TestAssociatedPRScanner(t *testing.T) {
|
||||
common.SetTestLogger(t)
|
||||
testTable := []struct {
|
||||
name string
|
||||
input string
|
||||
@@ -34,7 +35,7 @@ func TestAssociatedPRScanner(t *testing.T) {
|
||||
},
|
||||
{
|
||||
"Multiple PRs",
|
||||
"Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo#5\n",
|
||||
"Some header of the issue\n\nFollowed by some description\nPR: test/foo#4\n\nPR: test/goo!5\n",
|
||||
[]common.BasicPR{
|
||||
{Org: "test", Repo: "foo", Num: 4},
|
||||
{Org: "test", Repo: "goo", Num: 5},
|
||||
@@ -95,6 +96,7 @@ func TestAssociatedPRScanner(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestAppendingPRsToDescription(t *testing.T) {
|
||||
common.SetTestLogger(t)
|
||||
testTable := []struct {
|
||||
name string
|
||||
desc string
|
||||
@@ -107,7 +109,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
|
||||
[]common.BasicPR{
|
||||
{Org: "a", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a/b#100",
|
||||
"something\n\nPR: a/b!100",
|
||||
},
|
||||
{
|
||||
"Append multiple PR to end of description",
|
||||
@@ -119,7 +121,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
|
||||
{Org: "b", Repo: "b", Num: 100},
|
||||
{Org: "c", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
|
||||
"something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
|
||||
},
|
||||
{
|
||||
"Append multiple sorted PR to end of description and remove dups",
|
||||
@@ -133,7 +135,7 @@ func TestAppendingPRsToDescription(t *testing.T) {
|
||||
{Org: "a1", Repo: "c", Num: 101},
|
||||
{Org: "a1", Repo: "b", Num: 100},
|
||||
},
|
||||
"something\n\nPR: a1/b#100\nPR: a1/c#100\nPR: a1/c#101\nPR: b/b#100\nPR: c/b#100",
|
||||
"something\n\nPR: a1/b!100\nPR: a1/c!100\nPR: a1/c!101\nPR: b/b!100\nPR: c/b!100",
|
||||
},
|
||||
}
|
||||
|
||||
355
common/config.go
Normal file
355
common/config.go
Normal file
@@ -0,0 +1,355 @@
|
||||
package common
|
||||
|
||||
/*
|
||||
* This file is part of Autogits.
|
||||
*
|
||||
* Copyright © 2024 SUSE LLC
|
||||
*
|
||||
* Autogits is free software: you can redistribute it and/or modify it under
|
||||
* the terms of the GNU General Public License as published by the Free Software
|
||||
* Foundation, either version 2 of the License, or (at your option) any later
|
||||
* version.
|
||||
*
|
||||
* Autogits is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along with
|
||||
* Foobar. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/tailscale/hujson"
|
||||
)
|
||||
|
||||
//go:generate mockgen -source=config.go -destination=mock/config.go -typed
|
||||
|
||||
const (
|
||||
ProjectConfigFile = "workflow.config"
|
||||
StagingConfigFile = "staging.config"
|
||||
|
||||
Permission_ForceMerge = "force-merge"
|
||||
Permission_Group = "release-engineering"
|
||||
|
||||
MergeModeFF = "ff-only"
|
||||
MergeModeReplace = "replace"
|
||||
MergeModeDevel = "devel"
|
||||
)
|
||||
|
||||
type ConfigFile struct {
|
||||
GitProjectNames []string
|
||||
}
|
||||
|
||||
type ReviewGroup struct {
|
||||
Name string
|
||||
Silent bool // will not request reviews from group members
|
||||
Reviewers []string
|
||||
}
|
||||
|
||||
type QAConfig struct {
|
||||
Name string
|
||||
Origin string
|
||||
Label string // requires this gitea lable to be set or skipped
|
||||
Labels []string // requires any of the lables to be set
|
||||
BuildDisableRepos []string // which repos to build disable in the new project
|
||||
}
|
||||
|
||||
type Permissions struct {
|
||||
Permission string
|
||||
Members []string
|
||||
}
|
||||
|
||||
const (
|
||||
Label_StagingAuto = "staging/Auto"
|
||||
Label_ReviewPending = "review/Pending"
|
||||
Label_ReviewDone = "review/Done"
|
||||
Label_NewRepository = "new/New Repository"
|
||||
)
|
||||
|
||||
func LabelKey(tag_value string) string {
|
||||
// capitalize first letter and remove /
|
||||
if len(tag_value) == 0 {
|
||||
return ""
|
||||
}
|
||||
return strings.ToUpper(tag_value[0:1]) + strings.ReplaceAll(tag_value[1:], "/", "")
|
||||
}
|
||||
|
||||
type AutogitConfig struct {
|
||||
Workflows []string // [pr, direct, test]
|
||||
Organization string
|
||||
GitProjectName string // Organization/GitProjectName.git is PrjGit
|
||||
Branch string // branch name of PkgGit that aligns with PrjGit submodules
|
||||
Reviewers []string // only used by `pr` workflow
|
||||
Permissions []*Permissions // only used by `pr` workflow
|
||||
ReviewGroups []*ReviewGroup
|
||||
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
|
||||
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
|
||||
|
||||
Labels map[string]string // list of tags, if not default, to apply
|
||||
MergeMode string // project merge mode
|
||||
|
||||
NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume somethign else creates the ProjectGit PR
|
||||
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
|
||||
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
|
||||
ReviewRequired bool // always require a maintainer review, even if maintainer submits it. Only ignored if no other package or project reviewers
|
||||
}
|
||||
|
||||
type AutogitConfigs []*AutogitConfig
|
||||
|
||||
func ReadConfig(reader io.Reader) (*ConfigFile, error) {
|
||||
data, err := io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error reading config data: %w", err)
|
||||
}
|
||||
|
||||
config := ConfigFile{}
|
||||
data, err = hujson.Standardize(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to parse json: %w", err)
|
||||
}
|
||||
if err := json.Unmarshal(data, &config.GitProjectNames); err != nil {
|
||||
return nil, fmt.Errorf("Error parsing Git Project paths: %w", err)
|
||||
}
|
||||
|
||||
return &config, nil
|
||||
}
|
||||
|
||||
func ReadConfigFile(filename string) (*ConfigFile, error) {
|
||||
file, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot open config file for reading. err: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
return ReadConfig(file)
|
||||
}
|
||||
|
||||
type GiteaFileContentAndRepoFetcher interface {
|
||||
GiteaFileContentReader
|
||||
GiteaRepoFetcher
|
||||
}
|
||||
|
||||
func UnmarshalWorkflowConfig(data []byte) (*AutogitConfig, error) {
|
||||
var config AutogitConfig
|
||||
data, err := hujson.Standardize(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to parse json: %w", err)
|
||||
}
|
||||
if err := json.Unmarshal(data, &config); err != nil {
|
||||
return nil, fmt.Errorf("Error parsing workflow config file: %s: %w", string(data), err)
|
||||
}
|
||||
|
||||
return &config, nil
|
||||
}
|
||||
|
||||
// ReadWorkflowConfig fetches and parses the workflow configuration file
// (ProjectConfigFile) from the project git repository identified by
// git_project, which has the form "org[/repo][#branch]". The repo part
// defaults to DefaultGitPrj and a missing branch is resolved to the
// repository's default branch. On success the returned config has
// Organization defaulted, GitProjectName normalized to "org/repo#branch",
// and MergeMode validated (empty defaults to MergeModeFF).
func ReadWorkflowConfig(gitea GiteaFileContentAndRepoFetcher, git_project string) (*AutogitConfig, error) {
	// split off an optional "#branch" suffix
	hash := strings.Split(git_project, "#")
	branch := ""
	if len(hash) > 1 {
		branch = hash[1]
	}

	a := strings.Split(hash[0], "/")
	prjGitRepo := DefaultGitPrj
	switch len(a) {
	case 1: // org only; use default repo name
	case 2:
		prjGitRepo = a[1]
	default:
		return nil, fmt.Errorf("Missing org/repo in projectgit: %s", git_project)
	}

	// branch may still be "" here -- the fetch then reads from the default branch
	data, _, err := gitea.GetRepositoryFileContent(a[0], prjGitRepo, branch, ProjectConfigFile)
	if err != nil {
		return nil, fmt.Errorf("Error fetching 'workflow.config' for %s/%s#%s: %w", a[0], prjGitRepo, branch, err)
	}

	config, err := UnmarshalWorkflowConfig(data)
	if err != nil {
		return nil, err
	}

	// default Organization to the org component of git_project
	if len(config.Organization) < 1 {
		config.Organization = a[0]
	}
	config.GitProjectName = a[0] + "/" + prjGitRepo
	// no explicit branch: ask Gitea for the repository's default branch
	if len(branch) == 0 {
		if r, err := gitea.GetRepository(a[0], prjGitRepo); err == nil {
			branch = r.DefaultBranch
		} else {
			return nil, fmt.Errorf("Failed to read workflow config in %s: %w", git_project, err)
		}
	}
	// GitProjectName always carries an explicit branch from here on
	config.GitProjectName = config.GitProjectName + "#" + branch

	// verify merge modes
	switch config.MergeMode {
	case MergeModeFF, MergeModeDevel, MergeModeReplace:
		break // good results
	case "":
		config.MergeMode = MergeModeFF
	default:
		return nil, fmt.Errorf("Unsupported merge mode in %s: %s", git_project, config.MergeMode)
	}

	return config, nil
}
|
||||
|
||||
func ResolveWorkflowConfigs(gitea GiteaFileContentAndRepoFetcher, config *ConfigFile) (AutogitConfigs, error) {
|
||||
configs := make([]*AutogitConfig, 0, len(config.GitProjectNames))
|
||||
for _, git_project := range config.GitProjectNames {
|
||||
c, err := ReadWorkflowConfig(gitea, git_project)
|
||||
if err != nil {
|
||||
// can't sync, so ignore for now
|
||||
log.Println(err)
|
||||
} else {
|
||||
configs = append(configs, c)
|
||||
}
|
||||
}
|
||||
|
||||
return configs, nil
|
||||
}
|
||||
|
||||
func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *AutogitConfig {
|
||||
prjgit := org + "/" + repo + "#" + branch
|
||||
for _, c := range configs {
|
||||
if c.GitProjectName == prjgit {
|
||||
return c
|
||||
}
|
||||
}
|
||||
for _, c := range configs {
|
||||
if c.Organization == org && c.Branch == branch {
|
||||
return c
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) HasPermission(user, permission string) bool {
|
||||
if config == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, p := range config.Permissions {
|
||||
if p.Permission == permission {
|
||||
if slices.Contains(p.Members, user) {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, m := range p.Members {
|
||||
if members, err := config.GetReviewGroupMembers(m); err == nil && slices.Contains(members, user) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) GetReviewGroupMembers(reviewer string) ([]string, error) {
|
||||
for _, g := range config.ReviewGroups {
|
||||
if g.Name == reviewer {
|
||||
return g.Reviewers, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) GetReviewGroup(reviewer string) (*ReviewGroup, error) {
|
||||
for _, g := range config.ReviewGroups {
|
||||
if g.Name == reviewer {
|
||||
return g, nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("User " + reviewer + " not found as group reviewer for " + config.GitProjectName)
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) GetPrjGit() (string, string, string) {
|
||||
org := config.Organization
|
||||
repo := DefaultGitPrj
|
||||
branch := ""
|
||||
|
||||
a := strings.Split(config.GitProjectName, "/")
|
||||
if len(a[0]) > 0 {
|
||||
repo = strings.TrimSpace(a[0])
|
||||
}
|
||||
if len(a) == 2 {
|
||||
if a[0] = strings.TrimSpace(a[0]); len(a[0]) > 0 {
|
||||
org = a[0]
|
||||
}
|
||||
repo = strings.TrimSpace(a[1])
|
||||
}
|
||||
b := strings.Split(repo, "#")
|
||||
if len(b) == 2 {
|
||||
if b[0] = strings.TrimSpace(b[0]); len(b[0]) > 0 {
|
||||
repo = b[0]
|
||||
} else {
|
||||
repo = DefaultGitPrj
|
||||
}
|
||||
if b[1] = strings.TrimSpace(b[1]); len(b[1]) > 0 {
|
||||
branch = strings.TrimSpace(b[1])
|
||||
}
|
||||
}
|
||||
|
||||
if len(branch) == 0 {
|
||||
panic("branch for project is undefined. Should not happend." + org + "/" + repo)
|
||||
}
|
||||
return org, repo, branch
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) GetRemoteBranch() string {
|
||||
return "origin_" + config.Branch
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) Label(label string) string {
|
||||
if t, found := config.Labels[LabelKey(label)]; found {
|
||||
return t
|
||||
}
|
||||
|
||||
return label
|
||||
}
|
||||
|
||||
// StagingConfig describes staging-build behavior for a project git.
// It is parsed from HuJSON by ParseStagingConfig.
type StagingConfig struct {
	ObsProject string // presumably the OBS project stagings derive from -- TODO confirm against staging bot
	RebuildAll bool
	CleanupDelay int // cleanup delay, in hours, for unmerged closed PRs (def: 48)

	// if set, then only use pull request numbers as unique identifiers
	StagingProject string
	QA []QAConfig // per-staging QA setups; legacy Label field migrated in ParseStagingConfig
}
|
||||
|
||||
func ParseStagingConfig(data []byte) (*StagingConfig, error) {
|
||||
var staging StagingConfig
|
||||
if len(data) == 0 {
|
||||
return nil, errors.New("non-existent config file.")
|
||||
}
|
||||
data, err := hujson.Standardize(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
staging.CleanupDelay = 48
|
||||
if err := json.Unmarshal(data, &staging); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// backward compability, transfer legacy Label to new Labels array
|
||||
for _, setup := range staging.QA {
|
||||
if len(setup.Labels) == 0 && len(setup.Label) > 0 {
|
||||
setup.Labels = []string{setup.Label}
|
||||
setup.Label = ""
|
||||
}
|
||||
}
|
||||
|
||||
return &staging, nil
|
||||
}
|
||||
408
common/config_test.go
Normal file
408
common/config_test.go
Normal file
@@ -0,0 +1,408 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestLabelKey(t *testing.T) {
|
||||
tests := map[string]string{
|
||||
"": "",
|
||||
"foo": "Foo",
|
||||
"foo/bar": "Foobar",
|
||||
"foo/Bar": "FooBar",
|
||||
}
|
||||
|
||||
for k, v := range tests {
|
||||
if c := common.LabelKey(k); c != v {
|
||||
t.Error("expected", v, "got", c, "input", k)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigLabelParser(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
json string
|
||||
label_value string
|
||||
}{
|
||||
{
|
||||
name: "empty",
|
||||
json: "{}",
|
||||
label_value: "path/String",
|
||||
},
|
||||
{
|
||||
name: "defined",
|
||||
json: `{"Labels": {"foo": "bar", "PathString": "moo/Label"}}`,
|
||||
label_value: "moo/Label",
|
||||
},
|
||||
{
|
||||
name: "undefined",
|
||||
json: `{"Labels": {"foo": "bar", "NotPathString": "moo/Label"}}`,
|
||||
label_value: "path/String",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
repo := models.Repository{
|
||||
DefaultBranch: "master",
|
||||
}
|
||||
|
||||
ctl := NewController(t)
|
||||
gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
|
||||
gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.json), "abc", nil)
|
||||
gitea.EXPECT().GetRepository("foo", "bar").Return(&repo, nil)
|
||||
|
||||
config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
|
||||
if err != nil || config == nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if l := config.Label("path/String"); l != test.label_value {
|
||||
t.Error("Expecting", test.label_value, "got", l)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestProjectConfigMatcher exercises AutogitConfigs.GetPrjGitConfig: an
// exact "org/repo#branch" match on GitProjectName wins over the fallback
// match on Organization+Branch. The `config` field indexes the expected
// entry in `configs`; -1 means no match is expected.
func TestProjectConfigMatcher(t *testing.T) {
	configs := common.AutogitConfigs{
		{
			Organization:   "test",
			GitProjectName: "test/prjgit#main",
		},
		{
			Organization:   "test",
			Branch:         "main",
			GitProjectName: "test/prjgit#main",
		},
		{
			Organization:   "test",
			Branch:         "main",
			GitProjectName: "test/bar#never_match",
		},
		{
			Organization:   "test",
			GitProjectName: "test/bar#main",
		},
	}

	tests := []struct {
		name   string
		org    string
		repo   string
		branch string
		config int // index into configs; -1 expects nil
	}{
		{
			name:   "invalid match",
			org:    "foo",
			repo:   "bar",
			config: -1,
		},
		{
			name:   "default branch",
			org:    "test",
			repo:   "foo",
			branch: "",
			config: 0,
		},
		{
			name:   "main branch",
			org:    "test",
			repo:   "foo",
			branch: "main",
			config: 1,
		},
		{
			// exact GitProjectName match beats the org+branch fallback
			name:   "prjgit only match",
			org:    "test",
			repo:   "bar",
			branch: "main",
			config: 3,
		},
		{
			name:   "non-default branch match",
			org:    "test",
			repo:   "bar",
			branch: "something_main",
			config: -1,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := configs.GetPrjGitConfig(test.org, test.repo, test.branch)
			if test.config < 0 {
				if c != nil {
					t.Fatal("Expected nil. Got:", *c)
				}
			} else if config := configs[test.config]; c != config {
				// pointer comparison: the matcher must return the exact entry
				t.Fatal("Expected", *config, "got", *c)
			}
		})
	}
}
|
||||
|
||||
func TestConfigWorkflowParser(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
config_json string
|
||||
repo models.Repository
|
||||
}{
|
||||
{
|
||||
name: "Regular workflow file",
|
||||
config_json: `{
|
||||
"Workflows": ["direct", "pr"],
|
||||
"Organization": "testing",
|
||||
"ReviewGroups": [
|
||||
{
|
||||
"Name": "gnuman1",
|
||||
"Reviewers": ["adamm"]
|
||||
}
|
||||
]
|
||||
}`,
|
||||
repo: models.Repository{
|
||||
DefaultBranch: "master",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := NewController(t)
|
||||
gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
|
||||
gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.config_json), "abc", nil)
|
||||
gitea.EXPECT().GetRepository("foo", "bar").Return(&test.repo, nil)
|
||||
|
||||
config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if config.ManualMergeOnly != false {
|
||||
t.Fatal("This should be false")
|
||||
}
|
||||
|
||||
if config.Label("foobar") != "foobar" {
|
||||
t.Fatal("undefined label should return default value")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: should test ReadWorkflowConfig as it will always set prjgit completely
|
||||
// TestProjectGitParser drives AutogitConfig.GetPrjGit over various
// GitProjectName shapes ("repo#branch", "org/repo#branch", whitespace
// noise) and checks the resulting (org, repo, branch) triple. Every case
// carries an explicit branch, since GetPrjGit panics without one.
func TestProjectGitParser(t *testing.T) {
	tests := []struct {
		name   string
		prjgit string
		org    string
		branch string
		res    [3]string // expected (org, repo, branch)
	}{
		{
			name:   "repo only",
			prjgit: "repo.git#master",
			org:    "org",
			branch: "br",
			res:    [3]string{"org", "repo.git", "master"},
		},
		{
			name:   "default",
			org:    "org",
			prjgit: "org/_ObsPrj#master",
			res:    [3]string{"org", common.DefaultGitPrj, "master"},
		},
		{
			name:   "repo with branch",
			org:    "org2",
			prjgit: "org2/repo.git#somebranch",
			res:    [3]string{"org2", "repo.git", "somebranch"},
		},
		{
			// org in prjgit overrides the config Organization
			name:   "repo org and branch",
			org:    "org3",
			prjgit: "oorg/foo.bar#point",
			res:    [3]string{"oorg", "foo.bar", "point"},
		},
		{
			name:   "whitespace shouldn't matter",
			prjgit: " oorg / \nfoo.bar\t # point ",
			res:    [3]string{"oorg", "foo.bar", "point"},
		},
		{
			name:   "repo org and empty branch",
			org:    "org3",
			prjgit: "oorg/foo.bar#master",
			res:    [3]string{"oorg", "foo.bar", "master"},
		},
		{
			name:   "only branch defined",
			org:    "org3",
			prjgit: "org3/_ObsPrj#mybranch",
			res:    [3]string{"org3", "_ObsPrj", "mybranch"},
		},
		{
			name:   "only org and branch defined",
			org:    "org3",
			prjgit: "org1/_ObsPrj#mybranch",
			res:    [3]string{"org1", "_ObsPrj", "mybranch"},
		},
		{
			name:   "empty org and repo",
			org:    "org3",
			prjgit: "org3/repo#master",
			res:    [3]string{"org3", "repo", "master"},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := &common.AutogitConfig{
				Organization:   test.org,
				Branch:         test.branch,
				GitProjectName: test.prjgit,
			}

			i, j, k := c.GetPrjGit()
			res := []string{i, j, k}
			if !slices.Equal(res, test.res[:]) {
				t.Error("Expected", test.res, "but received", res)
			}
		})
	}
}
|
||||
|
||||
// TestConfigPermissions drives AutogitConfig.HasPermission: a nil config
// grants nothing, direct Members match, and a member entry may name a
// ReviewGroup whose Reviewers are then checked. Each case additionally
// probes a doubled (nonexistent) username, which must always be denied.
func TestConfigPermissions(t *testing.T) {
	tests := []struct {
		name       string
		permission string
		user       string
		config     *common.AutogitConfig // nil exercises the nil-receiver path
		result     bool
	}{
		{
			name:       "NoPermissions",
			permission: common.Permission_ForceMerge,
		},
		{
			name:       "NoPermissions",
			permission: common.Permission_Group,
		},
		{
			name:       "Regular permission ForcePush",
			permission: common.Permission_ForceMerge,
			result:     true,
			user:       "user",
			config: &common.AutogitConfig{
				Permissions: []*common.Permissions{
					&common.Permissions{
						Permission: common.Permission_ForceMerge,
						Members:    []string{"user"},
					},
				},
			},
		},
		{
			name:       "User is part of a group",
			permission: common.Permission_ForceMerge,
			result:     true,
			user:       "user",
			config: &common.AutogitConfig{
				Permissions: []*common.Permissions{
					&common.Permissions{
						Permission: common.Permission_ForceMerge,
						Members:    []string{"group"},
					},
				},
				ReviewGroups: []*common.ReviewGroup{
					&common.ReviewGroup{
						Name:      "group",
						Reviewers: []string{"some", "members", "including", "user"},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			if r := test.config.HasPermission(test.user, test.permission); r != test.result {
				t.Error("Expecting", test.result, "but got opposite")
			}
			// fake user (user+user) must never be granted the permission
			if r := test.config.HasPermission(test.user+test.user, test.permission); r {
				t.Error("Expecting false for fake user, but got opposite")
			}
		})
	}
}
|
||||
|
||||
// TestConfigMergeModeParser verifies MergeMode handling during
// ReadWorkflowConfig: empty defaults to ff-only, the three known modes
// pass through, and anything else is rejected with an error.
func TestConfigMergeModeParser(t *testing.T) {
	tests := []struct {
		name      string
		json      string
		mergeMode string
		wantErr   bool
	}{
		{
			name:      "empty",
			json:      "{}",
			mergeMode: common.MergeModeFF,
		},
		{
			name:      "ff-only",
			json:      `{"MergeMode": "ff-only"}`,
			mergeMode: common.MergeModeFF,
		},
		{
			name:      "replace",
			json:      `{"MergeMode": "replace"}`,
			mergeMode: common.MergeModeReplace,
		},
		{
			name:      "devel",
			json:      `{"MergeMode": "devel"}`,
			mergeMode: common.MergeModeDevel,
		},
		{
			name:    "unsupported",
			json:    `{"MergeMode": "invalid"}`,
			wantErr: true,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			repo := models.Repository{
				DefaultBranch: "master",
			}

			ctl := gomock.NewController(t)
			gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
			gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.json), "abc", nil)
			gitea.EXPECT().GetRepository("foo", "bar").Return(&repo, nil)

			config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
			if test.wantErr {
				if err == nil {
					t.Fatal("Expected error, got nil")
				}
				return
			}

			if err != nil {
				t.Fatal(err)
			}

			if config.MergeMode != test.mergeMode {
				t.Errorf("Expected MergeMode %s, got %s", test.mergeMode, config.MergeMode)
			}
		})
	}
}
|
||||
@@ -19,9 +19,14 @@ package common
|
||||
*/
|
||||
|
||||
const (
|
||||
GiteaTokenEnv = "GITEA_TOKEN"
|
||||
ObsUserEnv = "OBS_USER"
|
||||
ObsPasswordEnv = "OBS_PASSWORD"
|
||||
GiteaTokenEnv = "GITEA_TOKEN"
|
||||
GiteaHostEnv = "GITEA_HOST"
|
||||
ObsUserEnv = "OBS_USER"
|
||||
ObsPasswordEnv = "OBS_PASSWORD"
|
||||
ObsSshkeyEnv = "OBS_SSHKEY"
|
||||
ObsSshkeyFileEnv = "OBS_SSHKEYFILE"
|
||||
ObsApiEnv = "OBS_API"
|
||||
ObsWebEnv = "OBS_WEB"
|
||||
|
||||
DefaultGitPrj = "_ObsPrj"
|
||||
PrjLinksFile = "links.json"
|
||||
@@ -31,3 +36,6 @@ const (
|
||||
|
||||
TopicApp = "src"
|
||||
)
|
||||
|
||||
// when set, pushing to remote does not happen, and other remote side-effects should also not happen
|
||||
var IsDryRun bool
|
||||
@@ -1731,3 +1731,246 @@ const requestedReviewJSON = `{
|
||||
"commit_id": "",
|
||||
"review": null
|
||||
}`
|
||||
|
||||
const requestStatusJSON=`{
|
||||
"commit": {
|
||||
"id": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
|
||||
"message": "Update nodejs-common.changes\n",
|
||||
"url": "https://src.opensuse.org/autogits/nodejs-common/commit/e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
|
||||
"author": {
|
||||
"name": "Adam Majer",
|
||||
"email": "adamm@noreply.src.opensuse.org",
|
||||
"username": "adamm"
|
||||
},
|
||||
"committer": {
|
||||
"name": "Adam Majer",
|
||||
"email": "adamm@noreply.src.opensuse.org",
|
||||
"username": "adamm"
|
||||
},
|
||||
"verification": null,
|
||||
"timestamp": "2025-09-16T12:41:02+02:00",
|
||||
"added": [],
|
||||
"removed": [],
|
||||
"modified": [
|
||||
"nodejs-common.changes"
|
||||
]
|
||||
},
|
||||
"context": "test",
|
||||
"created_at": "2025-09-16T10:50:32Z",
|
||||
"description": "",
|
||||
"id": 21663,
|
||||
"repository": {
|
||||
"id": 90520,
|
||||
"owner": {
|
||||
"id": 983,
|
||||
"login": "autogits",
|
||||
"login_name": "",
|
||||
"source_id": 0,
|
||||
"full_name": "",
|
||||
"email": "",
|
||||
"avatar_url": "https://src.opensuse.org/avatars/80a61ef3a14c3c22f0b8b1885d1a75d4",
|
||||
"html_url": "https://src.opensuse.org/autogits",
|
||||
"language": "",
|
||||
"is_admin": false,
|
||||
"last_login": "0001-01-01T00:00:00Z",
|
||||
"created": "2024-06-20T09:46:37+02:00",
|
||||
"restricted": false,
|
||||
"active": false,
|
||||
"prohibit_login": false,
|
||||
"location": "",
|
||||
"website": "",
|
||||
"description": "",
|
||||
"visibility": "public",
|
||||
"followers_count": 0,
|
||||
"following_count": 0,
|
||||
"starred_repos_count": 0,
|
||||
"username": "autogits"
|
||||
},
|
||||
"name": "nodejs-common",
|
||||
"full_name": "autogits/nodejs-common",
|
||||
"description": "",
|
||||
"empty": false,
|
||||
"private": false,
|
||||
"fork": true,
|
||||
"template": false,
|
||||
"parent": {
|
||||
"id": 62649,
|
||||
"owner": {
|
||||
"id": 64,
|
||||
"login": "pool",
|
||||
"login_name": "",
|
||||
"source_id": 0,
|
||||
"full_name": "",
|
||||
"email": "",
|
||||
"avatar_url": "https://src.opensuse.org/avatars/b10a8c0bede9eb4ea771b04db3149f28",
|
||||
"html_url": "https://src.opensuse.org/pool",
|
||||
"language": "",
|
||||
"is_admin": false,
|
||||
"last_login": "0001-01-01T00:00:00Z",
|
||||
"created": "2023-03-01T14:41:17+01:00",
|
||||
"restricted": false,
|
||||
"active": false,
|
||||
"prohibit_login": false,
|
||||
"location": "",
|
||||
"website": "",
|
||||
"description": "",
|
||||
"visibility": "public",
|
||||
"followers_count": 2,
|
||||
"following_count": 0,
|
||||
"starred_repos_count": 0,
|
||||
"username": "pool"
|
||||
},
|
||||
"name": "nodejs-common",
|
||||
"full_name": "pool/nodejs-common",
|
||||
"description": "",
|
||||
"empty": false,
|
||||
"private": false,
|
||||
"fork": false,
|
||||
"template": false,
|
||||
"mirror": false,
|
||||
"size": 134,
|
||||
"language": "",
|
||||
"languages_url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common/languages",
|
||||
"html_url": "https://src.opensuse.org/pool/nodejs-common",
|
||||
"url": "https://src.opensuse.org/api/v1/repos/pool/nodejs-common",
|
||||
"link": "",
|
||||
"ssh_url": "gitea@src.opensuse.org:pool/nodejs-common.git",
|
||||
"clone_url": "https://src.opensuse.org/pool/nodejs-common.git",
|
||||
"original_url": "",
|
||||
"website": "",
|
||||
"stars_count": 0,
|
||||
"forks_count": 3,
|
||||
"watchers_count": 12,
|
||||
"open_issues_count": 0,
|
||||
"open_pr_counter": 0,
|
||||
"release_counter": 0,
|
||||
"default_branch": "factory",
|
||||
"archived": false,
|
||||
"created_at": "2024-06-17T17:08:45+02:00",
|
||||
"updated_at": "2025-08-21T21:58:31+02:00",
|
||||
"archived_at": "1970-01-01T01:00:00+01:00",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"push": true,
|
||||
"pull": true
|
||||
},
|
||||
"has_issues": true,
|
||||
"internal_tracker": {
|
||||
"enable_time_tracker": false,
|
||||
"allow_only_contributors_to_track_time": true,
|
||||
"enable_issue_dependencies": true
|
||||
},
|
||||
"has_wiki": false,
|
||||
"has_pull_requests": true,
|
||||
"has_projects": false,
|
||||
"projects_mode": "all",
|
||||
"has_releases": false,
|
||||
"has_packages": false,
|
||||
"has_actions": false,
|
||||
"ignore_whitespace_conflicts": false,
|
||||
"allow_merge_commits": true,
|
||||
"allow_rebase": true,
|
||||
"allow_rebase_explicit": true,
|
||||
"allow_squash_merge": true,
|
||||
"allow_fast_forward_only_merge": true,
|
||||
"allow_rebase_update": true,
|
||||
"allow_manual_merge": true,
|
||||
"autodetect_manual_merge": true,
|
||||
"default_delete_branch_after_merge": false,
|
||||
"default_merge_style": "merge",
|
||||
"default_allow_maintainer_edit": false,
|
||||
"avatar_url": "",
|
||||
"internal": false,
|
||||
"mirror_interval": "",
|
||||
"object_format_name": "sha256",
|
||||
"mirror_updated": "0001-01-01T00:00:00Z",
|
||||
"topics": [],
|
||||
"licenses": []
|
||||
},
|
||||
"mirror": false,
|
||||
"size": 143,
|
||||
"language": "",
|
||||
"languages_url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common/languages",
|
||||
"html_url": "https://src.opensuse.org/autogits/nodejs-common",
|
||||
"url": "https://src.opensuse.org/api/v1/repos/autogits/nodejs-common",
|
||||
"link": "",
|
||||
"ssh_url": "gitea@src.opensuse.org:autogits/nodejs-common.git",
|
||||
"clone_url": "https://src.opensuse.org/autogits/nodejs-common.git",
|
||||
"original_url": "",
|
||||
"website": "",
|
||||
"stars_count": 0,
|
||||
"forks_count": 1,
|
||||
"watchers_count": 4,
|
||||
"open_issues_count": 0,
|
||||
"open_pr_counter": 1,
|
||||
"release_counter": 0,
|
||||
"default_branch": "factory",
|
||||
"archived": false,
|
||||
"created_at": "2024-07-01T13:29:03+02:00",
|
||||
"updated_at": "2025-09-16T12:41:03+02:00",
|
||||
"archived_at": "1970-01-01T01:00:00+01:00",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"push": true,
|
||||
"pull": true
|
||||
},
|
||||
"has_issues": false,
|
||||
"has_wiki": false,
|
||||
"has_pull_requests": true,
|
||||
"has_projects": false,
|
||||
"projects_mode": "all",
|
||||
"has_releases": false,
|
||||
"has_packages": false,
|
||||
"has_actions": false,
|
||||
"ignore_whitespace_conflicts": false,
|
||||
"allow_merge_commits": true,
|
||||
"allow_rebase": true,
|
||||
"allow_rebase_explicit": true,
|
||||
"allow_squash_merge": true,
|
||||
"allow_fast_forward_only_merge": true,
|
||||
"allow_rebase_update": true,
|
||||
"allow_manual_merge": true,
|
||||
"autodetect_manual_merge": true,
|
||||
"default_delete_branch_after_merge": false,
|
||||
"default_merge_style": "merge",
|
||||
"default_allow_maintainer_edit": false,
|
||||
"avatar_url": "",
|
||||
"internal": false,
|
||||
"mirror_interval": "",
|
||||
"object_format_name": "sha256",
|
||||
"mirror_updated": "0001-01-01T00:00:00Z",
|
||||
"topics": [],
|
||||
"licenses": [
|
||||
"MIT"
|
||||
]
|
||||
},
|
||||
"sender": {
|
||||
"id": 129,
|
||||
"login": "adamm",
|
||||
"login_name": "",
|
||||
"source_id": 0,
|
||||
"full_name": "Adam Majer",
|
||||
"email": "adamm@noreply.src.opensuse.org",
|
||||
"avatar_url": "https://src.opensuse.org/avatars/3e8917bfbf04293f7c20c28cacd83dae2ba9b78a6c6a9a1bedf14c683d8a3763",
|
||||
"html_url": "https://src.opensuse.org/adamm",
|
||||
"language": "",
|
||||
"is_admin": false,
|
||||
"last_login": "0001-01-01T00:00:00Z",
|
||||
"created": "2023-07-21T16:43:48+02:00",
|
||||
"restricted": false,
|
||||
"active": false,
|
||||
"prohibit_login": false,
|
||||
"location": "",
|
||||
"website": "",
|
||||
"description": "",
|
||||
"visibility": "public",
|
||||
"followers_count": 1,
|
||||
"following_count": 0,
|
||||
"starred_repos_count": 0,
|
||||
"username": "adamm"
|
||||
},
|
||||
"sha": "e637d86cbbdd438edbf60148e28f9d75a74d51b27b01f75610f247cd18394c8e",
|
||||
"state": "pending",
|
||||
"target_url": "https://src.opensuse.org/",
|
||||
"updated_at": "2025-09-16T10:50:32Z"
|
||||
}`
|
||||
1464
common/git_utils.go
Normal file
1464
common/git_utils.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -24,12 +24,131 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestGitClone(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
tests := []struct {
|
||||
name string
|
||||
|
||||
repo string
|
||||
branch string
|
||||
remoteName string
|
||||
remoteUrl string
|
||||
}{
|
||||
{
|
||||
name: "Basic clone",
|
||||
repo: "pkgAclone",
|
||||
branch: "main",
|
||||
remoteName: "pkgA_main",
|
||||
remoteUrl: "/pkgA",
|
||||
},
|
||||
{
|
||||
name: "Remote branch is non-existent",
|
||||
repo: "pkgAclone",
|
||||
branch: "main_not_here",
|
||||
remoteName: "pkgA_main",
|
||||
remoteUrl: "/pkgA",
|
||||
},
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
execPath, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
d := t.TempDir()
|
||||
os.Chdir(d)
|
||||
defer os.Chdir(execPath)
|
||||
cmd := exec.Command(path.Join(execPath, "test_clone_setup.sh"))
|
||||
if _, err := cmd.Output(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(d, "Test", "test@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
g, err := gh.CreateGitHandler("org")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if _, err := g.GitClone(test.repo, test.branch, "file://"+d+test.remoteUrl); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
id, err := g.GitBranchHead(test.repo, test.branch)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Fatal(id)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestGitCloneCommitID attempts to clone a fixture repository at a
// specific commit ID rather than a branch name. GitClone does not support
// commit-ID clones yet, so the test skips at the point of failure (the
// TODO marks the pending feature). Requires test_repo_setup.sh in the
// original working directory.
func TestGitCloneCommitID(t *testing.T) {
	SetTestLogger(t)
	execPath, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	d := t.TempDir()
	if err := os.Chdir(d); err != nil {
		t.Fatal(err)
	}
	defer os.Chdir(execPath)
	cmd := exec.Command(path.Join(execPath, "test_repo_setup.sh"))
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Log(string(out))
		t.Fatal(err)
	}

	gh, err := AllocateGitWorkTree(d, "Test", "test@example.com")
	if err != nil {
		t.Fatal(err)
	}

	g, err := gh.CreateGitHandler("org")
	if err != nil {
		t.Fatal(err)
	}

	// Get a commit ID from pkgA
	remoteUrl := "file://" + d + "/pkgA"
	out, err := exec.Command("git", "-C", path.Join(d, "pkgA"), "rev-parse", "main").Output()
	if err != nil {
		t.Fatal(err)
	}
	commitID := strings.TrimSpace(string(out))

	repo := "pkgAcloneCommitID"
	if _, err := g.GitClone(repo, commitID, remoteUrl); err != nil {
		// deliberate: skip (not fail) until commit-ID clones are implemented
		t.Skip("TODO: Add GitClone CommitID support")
		t.Fatalf("GitClone failed with commit ID: %v", err)
	}

	// Verify we are at the right commit
	head, err := g.GitBranchHead(repo, commitID)
	if err != nil {
		t.Fatalf("GitBranchHead failed: %v", err)
	}
	if head != commitID {
		t.Errorf("Expected head %s, got %s", commitID, head)
	}
}
|
||||
|
||||
func TestGitMsgParsing(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
t.Run("tree message with size 56", func(t *testing.T) {
|
||||
const hdr = "f40888ea4515fe2e8eea617a16f5f50a45f652d894de3ad181d58de3aafb8f98 tree 56\x00"
|
||||
|
||||
@@ -108,6 +227,7 @@ func TestGitMsgParsing(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGitCommitParsing(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
t.Run("parse valid commit message", func(t *testing.T) {
|
||||
const commitData = "f40888ea4515fe2e8eea617a16f5f50a45f652d894de3ad181d58de3aafb8f99 commit 253\000" +
|
||||
`tree e20033df9f18780756ba4a96dbc7eb1a626253961039cb674156f266ba7a4e53
|
||||
@@ -136,7 +256,7 @@ committer Adam Majer <amajer@suse.com> 1720709149 +0200
|
||||
})
|
||||
|
||||
t.Run("parse multiline headers", func(t *testing.T) {
|
||||
const commitData = "cae5831ab48470ff060a5aaa12eb6e5a7acaf91e commit 1491\x00" +
|
||||
const commitData = "cae5831ab48470ff060a5aaa12eb6e5a7acaf91e commit 1492\000" +
|
||||
`tree 1f9c8fe8099615d6d3921528402ac53f09213b02
|
||||
parent e08a654fae0ecc91678819e0b62a2e014bad3339
|
||||
author Yagiz Nizipli <yagiz@nizipli.com> 1720967314 -0400
|
||||
@@ -168,7 +288,7 @@ Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
|
||||
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
|
||||
Reviewed-By: Ulises Gascón <ulisesgascongonzalez@gmail.com>
|
||||
Reviewed-By: Richard Lau <rlau@redhat.com>
|
||||
Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\000"
|
||||
|
||||
ch := make(chan byte, 5000)
|
||||
for _, b := range []byte(commitData) {
|
||||
@@ -189,6 +309,51 @@ Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse multiline headers", func(t *testing.T) {
|
||||
const commitData = "c07c52c57a10fb355956df3caad2986613838f149274fbe312ad76560764829d commit 1150\000" + `tree 3e06b280ea056141ed5e8af9794a41ae5281930c45321803eab53a240cb60044
|
||||
parent 19362a2cecb1fd25a89e03611d08ac68dcb1732f9dc0a68a40926356787fa4ca
|
||||
author Adrian Schröter <adrian@suse.de> 1746600403 +0200
|
||||
committer Adrian Schröter <adrian@suse.de> 1746600403 +0200
|
||||
gpgsig-sha256 -----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAABCgAdFiEE1QF1zm/pNbvyhgLFkY2MlUwI22cFAmgbAd0ACgkQkY2MlUwI
|
||||
22dxtA//eUCzIqxVdaEnOrFeTyxKig/mCOjaAyctmwr0vXUyElRtjXe4TzVG3QtR
|
||||
uDfhIrKYLZ2tU/0TewTW/4XopWxLuqEzVQLrjuYl7K5P3GoYk52W1yGT0szzm7/i
|
||||
87j4UdRL9YGU/gYO7nSzstcfTP6AcmYzVUoOnwYR0K2vyOVjO4niL3mFXxLkIgIt
|
||||
jd82xcE4JpQz9Yjyq2nDdz4A55kLAwsqY+dOct4oC6bZmj1/JeoGQfPvUsvsQgcI
|
||||
syCHVh0GBxjvSv50V/VPzxQTFMal/TdtvAD4kmP/9RDi/5THzus8Peam8pV0gEIC
|
||||
Q15ZcuLwIsC9i7ifUDYgzLgBBRdpSI0qji4Y6clWULPVjsyghgyfQw1trBSySpC8
|
||||
O1XfajUM+rXyrBLP6kzY+zl/zyzRdJ8JhljmC+SmNuyyEB77Hkn83k0f+aBhhqC2
|
||||
4b3fIsKtwJZ1w6gr6SSz1BottiT9ShQzRaL8iRoF/2l5MkHPR+QFg2J7EIBqCbCQ
|
||||
hFUjdvWAXQBWkkTQlJmLmJBXDOLQg3o6xCbnZM0gPFjZWE7e3Mpky7H0+xPnoeg9
|
||||
ukuvkexXQ6yrdiekA7HRLc76Te/I0m7KDOOWZ3rbJV6uH/3ps4FbLQTZO12AtZ6J
|
||||
n8hYdYfw9yjCxiKUjnEtXtDRe8DJpqv+hO0Wj4MI5gIA2JE2lzY=
|
||||
=Keg5
|
||||
-----END PGP SIGNATURE-----
|
||||
|
||||
dummy change, don't merge
|
||||
` + "\000"
|
||||
ch := make(chan byte)
|
||||
go func() {
|
||||
for _, b := range []byte(commitData) {
|
||||
ch <- b
|
||||
}
|
||||
}()
|
||||
commit, err := parseGitCommit(ch)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if commit.Tree != "3e06b280ea056141ed5e8af9794a41ae5281930c45321803eab53a240cb60044" {
|
||||
t.Errorf("Invalid commit object: %#v", commit)
|
||||
}
|
||||
|
||||
if commit.Msg != "dummy change, don't merge\n" {
|
||||
t.Errorf("Invalid commit msg: '%s'", commit.Msg)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse tree object", func(t *testing.T) {
|
||||
const treeData = "\x31\x61\x30\x35\x64\x62\x37\x33\x36\x39\x33\x37\x34\x33\x30\x65\x31\x38\x64\x66\x34\x33\x61\x32\x37\x61\x39\x38\x30\x30\x31\x30\x31\x32\x65\x31\x65\x64\x32\x30\x34\x38\x32\x39\x38\x36\x37\x31\x32\x38\x66\x32\x63\x65\x38\x34\x30\x36\x62\x35\x63\x66\x63\x39\x20\x74\x72\x65\x65\x20\x32\x30\x35\x00\x34\x30\x30\x30\x30\x20\x62\x6f\x74\x73\x2d\x63\x6f\x6d\x6d\x6f\x6e\x00\x93\x17\xaa\x47\xf6\xea\x37\xe8\xbc\xe2\x80\x77\x57\x90\xf4\xa8\x01\xd7\xe3\x70\x2f\x84\xfb\xe1\xb0\x0e\x4a\x2c\x1c\x75\x2c\x2b\x34\x30\x30\x30\x30\x20\x6f\x62\x73\x2d\x73\x74\x61\x67\x69\x6e\x67\x2d\x62\x6f\x74\x00\x79\x77\x8b\x28\x7d\x37\x10\x59\xb9\x71\x28\x36\xed\x20\x31\x5f\xfb\xe1\xed\xb5\xba\x4f\x5e\xbb\x65\x65\x68\x23\x77\x32\x58\xfe\x34\x30\x30\x30\x30\x20\x70\x72\x2d\x72\x65\x76\x69\x65\x77\x00\x36\x0d\x45\xcb\x76\xb8\x93\xb3\x21\xba\xfa\xd5\x00\x9d\xfc\x59\xab\x88\xc1\x3c\x81\xcb\x48\x5a\xe0\x29\x29\x0f\xe3\x6b\x3c\x5e\x34\x30\x30\x30\x30\x20\x70\x72\x6a\x67\x69\x74\x2d\x75\x70\x64\x61\x74\x65\x72\x00\xb4\x0b\x1c\xf5\xfb\xec\x9a\xb2\x9f\x48\x3e\x21\x18\x0d\x51\xb7\x98\x6e\x21\x99\x74\x84\x67\x71\x41\x24\x42\xfc\xc9\x04\x12\x99\x00"
|
||||
|
||||
@@ -243,9 +408,37 @@ Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com>` + "\x00"
|
||||
t.Error("expected submodule not found")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse nested trees with subtrees", func(t *testing.T) {
|
||||
const data = "873a323b262ebb3bd77b2592b2e11bdd08dbc721cbf4ac9f97637e58e1fffce7 tree 1083\x00100644\x20\x2Egitattributes\x00\xD8v\xA95\x87\xC1\xA9\xFCPn\xDD\xD4\x13\x9B\x8E\xD2\xCFs\xBD\x11q\x8A\xAE\x8A\x7Cg\xE2C\x14J\x01\xB0100644\x20\x2Egitignore\x00\xC3\xCD\x8En\x887\x3AJ\xA0P\xEEL\xD4\xF5\xD2v\x9C\xA6v\xC5D\x60\x40\x95\xD1\x0B\xA4\xB8\x86\xD4rE100644\x20COPYING\x00\x12\x2A\x28\xC8\xB9\x5D\x9B\x8A\x23\x1F\xE96\x07\x3F\xA9D\x90\xFD\xCE\x2Bi\x2D\x031\x5C\xCC\xC4fx\x00\xC22100644\x20README\x2Emd\x00\x92D\xF7\xFF\x0E0\x5C\xF2\xAC\x0DA\x06\x92\x0B\xD6z\x3CGh\x00y\x7EW1\xB9a\x8Ch\x215Fa100644\x20_service\x00\xC51\xF2\x12\xF3\x24\x9C\xD9\x9F\x0A\x93Mp\x12\xC1\xF7i\x05\x95\xC5Z\x06\x95i\x3Az\xC3\xF59\x7E\xF8\x1B100644\x20autogits\x2Echanges\x00\xF7\x8D\xBF\x0A\xCB\x5D\xB7y\x8C\xA9\x9C\xEB\x92\xAFd\x2C\x98\x23\x0C\x13\x13\xED\xDE\x5D\xBALD6\x3BR\x5B\xCA100644\x20autogits\x2Espec\x00\xD2\xBC\x20v\xD3\xE5F\xCA\xEE\xEA\x18\xC84\x0D\xA7\xCA\xD8O\xF2\x0A\xAB\x40\x2A\xFAL\x3B\xB4\xE6\x11\xE7o\xD140000\x20common\x00\xE2\xC9dg\xD0\x5D\xD1\xF1\x8ARW\xF0\x96\xD6\x29\x2F\x8F\xD9\xC7\x82\x1A\xB7\xAAw\xB0\xCE\xA8\xFE\xC8\xD7D\xF2100755\x20dev_test_helper\x2Esh\x00\xECY\xDD\xB3rz\x9Fh\xD4\x2E\x85\x02\x13\xF8\xFE\xB57\x8B\x1B6\x8E\x09dC\x1E\xE0\x90\x09\x08\xED\xBD_40000\x20devel\x2Dimporter\x00v\x98\x9B\x92\xD8\x24lu\xFC\xB2d\xC9\xCENb\xEE\x0F\x21\x8B\x92\x88\xDBs\xF8\x2E\xA8\xC8W\x1C\x20\xCF\xD440000\x20doc\x00\x8Akyq\xD0\xCF\xB8\x2F\x80Y\x2F\x11\xF0\x14\xA9\xFE\x96\x14\xE0W\x2C\xCF\xB9\x86\x7E\xFDi\xD7\x1F\x08Q\xFB40000\x20gitea\x2Devents\x2Drabbitmq\x2Dpublisher\x00\x5Cb\x3Fh\xA2\x06\x06\x0Cd\x09\xA5\xD9\xF7\x23\x5C\xF85\xF5\xB8\xBE\x7F\xD4O\x25t\xEF\xCC\xAB\x18\x7C\x0C\xF3100644\x20go\x2Emod\x00j\x85\x0B\x03\xC8\x9F\x9F\x0F\xC8\xE0\x8C\xF7\x3D\xC19\xF7\x12gk\xD6\x18JN\x24\xC0\x1C\xBE\x97oY\x02\x8D100644\x20go\x2Esum\x00h\x88\x2E\x27\xED\xD39\x8D\x12\x0F\x7D\x97\xA2\x5DE\xB9\x82o\x0Cu\xF4l\xA17s\x28\x2BQT\xE6\x12\x9040000\x20group\x2Dreview\x00\x7E\x7B\xB42\x0F\x3B\xC9o\x2C\xE79\x1DR\
xE2\xE4i\xAE\xF6u\x90\x09\xD8\xC9c\xE7\xF7\xC7\x92\xFB\xD7\xDD140000\x20obs\x2Dstaging\x2Dbot\x00\x12\xE8\xAF\x09\xD4\x5D\x13\x8D\xC9\x0AvPDc\xB6\x7C\xAC4\xD9\xC5\xD4_\x98i\xBE2\xA7\x25aj\xE2k40000\x20obs\x2Dstatus\x2Dservice\x00MATY\xA3\xFA\xED\x05\xBE\xEB\x2B\x07\x9CN\xA9\xF3SB\x22MlV\xA4\x5D\xDA\x0B\x0F\x23\xA1\xA8z\xD740000\x20systemd\x00\x2D\xE2\x03\x7E\xBD\xEB6\x8F\xC5\x0E\x12\xD4\xBD\x97P\xDD\xA2\x92\xCE6n\x08Q\xCA\xE4\x15\x97\x8F\x26V\x3DW100644\x20vendor\x2Etar\x2Ezst\x00\xD9\x2Es\x03I\x91\x22\x24\xC86q\x91\x95\xEF\xA3\xC9\x3C\x06D\x90w\xAD\xCB\xAE\xEEu2i\xCE\x05\x09u40000\x20workflow\x2Ddirect\x00\x94\xDB\xDFc\xB5A\xD5\x16\xB3\xC3ng\x94J\xE7\x101jYF\x15Q\xE97\xCFg\x14\x12\x28\x3A\xFC\xDB40000\x20workflow\x2Dpr\x00\xC1\xD8Z9\x18\x60\xA2\xE2\xEF\xB0\xFC\xD7\x2Ah\xF07\x0D\xEC\x8A7\x7E\x1A\xAAn\x13\x9C\xEC\x05s\xE8\xBDf\x00"
|
||||
|
||||
ch := make(chan byte, 2000)
|
||||
for _, b := range []byte(data) {
|
||||
ch <- b
|
||||
}
|
||||
|
||||
tree, err := parseGitTree(ch)
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
found := false
|
||||
for _, item := range tree.items {
|
||||
t.Log(item)
|
||||
if item.name == "workflow-pr" && item.hash == "c1d85a391860a2e2efb0fcd72a68f0370dec8a377e1aaa6e139cec0573e8bd66" && item.isTree() {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("expected submodule not found")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
func TestCommitTreeParsing(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
gitDir := t.TempDir()
|
||||
testDir, _ := os.Getwd()
|
||||
var commitId string
|
||||
@@ -256,15 +449,63 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
commitId = commitId + strings.TrimSpace(string(data))
|
||||
return len(data), nil
|
||||
})
|
||||
cmd.Stderr = os.Stderr
|
||||
if err := cmd.Run(); err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(gitDir, "", "")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Run("GitCatFile commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", commitId, "help")
|
||||
if err != nil {
|
||||
t.Error("failed", err)
|
||||
}
|
||||
|
||||
if string(file) != "help\n" {
|
||||
t.Error("expected 'help\\n' but got", string(file))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("GitCatFile commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", "HEAD", "help")
|
||||
if err != nil {
|
||||
t.Error("failed", err)
|
||||
}
|
||||
|
||||
if string(file) != "help\n" {
|
||||
t.Error("expected 'help\\n' but got", string(file))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("GitCatFile bad commit", func(t *testing.T) {
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
file, err := h.GitCatFile("", "518b468f391bf01d5d76d497d7cbecfa8b46d185714cf8745800ae18afb21afd", "help")
|
||||
if err == nil {
|
||||
t.Error("expected error, but not nothing")
|
||||
}
|
||||
|
||||
if string(file) != "" {
|
||||
t.Error("expected 'help\\n' but got", file)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("reads HEAD and parses the tree", func(t *testing.T) {
|
||||
const nodejs21 = "c678c57007d496a98bec668ae38f2c26a695f94af78012f15d044ccf066ccb41"
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
id, ok := h.GitSubmoduleCommitId("", "nodejs21", commitId)
|
||||
if !ok {
|
||||
t.Error("failed parse")
|
||||
@@ -275,9 +516,9 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("reads README.md", func(t *testing.T) {
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
data, err := h.GitCatFile("", commitId, "README.md")
|
||||
if err != nil {
|
||||
t.Errorf("failed parse: %v", err)
|
||||
@@ -288,9 +529,8 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("read HEAD", func(t *testing.T) {
|
||||
h := GitHandlerImpl{
|
||||
GitPath: gitDir,
|
||||
}
|
||||
h, _ := gh.ReadExistingPath(".")
|
||||
defer h.Close()
|
||||
|
||||
data, err := h.GitSubmoduleList("", "HEAD")
|
||||
if err != nil {
|
||||
@@ -307,6 +547,7 @@ func TestCommitTreeParsingOfHead(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGitStatusParse(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
testData := []struct {
|
||||
name string
|
||||
data []byte
|
||||
@@ -373,6 +614,8 @@ func TestGitStatusParse(t *testing.T) {
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Unmerged,
|
||||
States: [3]string{"587ec403f01113f2629da538f6e14b84781f70ac59c41aeedd978ea8b1253a76", "d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c", "087b1d5f22dbf0aa4a879fff27fff03568b334c90daa5f2653f4a7961e24ea33"},
|
||||
|
||||
SubmoduleChanges: "N...",
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -380,14 +623,13 @@ func TestGitStatusParse(t *testing.T) {
|
||||
name: "Renamed file",
|
||||
data: []byte("1 M. N... 100644 100644 100644 d23eb05d9ca92883ab9f4d28f3ec90c05f667f3a5c8c8e291bd65e03bac9ae3c 896cd09f36d39e782d66ae32dd5614d4f4d83fc689f132aab2dfc019a9f5b6f3 .gitmodules\x002 R. S... 160000 160000 160000 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 3befe051a34612530acfa84c736d2454278453ec0f78ec028f25d2980f8c3559 R100 pkgQ\x00pkgC\x00"),
|
||||
res: []GitStatusData{
|
||||
{
|
||||
Path: "pkgQ",
|
||||
{
|
||||
Path: "pkgQ",
|
||||
Status: GitStatus_Renamed,
|
||||
States: [3]string{"pkgC"},
|
||||
|
||||
},
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
{
|
||||
Path: ".gitmodules",
|
||||
Status: GitStatus_Modified,
|
||||
},
|
||||
},
|
||||
@@ -412,3 +654,108 @@ func TestGitStatusParse(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitDirectoryListRepro(t *testing.T) {
|
||||
SetTestLogger(t)
|
||||
d := t.TempDir()
|
||||
|
||||
// Setup a mock environment for GitHandlerImpl
|
||||
gh, err := AllocateGitWorkTree(d, "Test", "test@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
org := "repo-org"
|
||||
repoName := "test-repo"
|
||||
repoPath := filepath.Join(d, org, repoName)
|
||||
err = os.MkdirAll(repoPath, 0755)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
runGit := func(args ...string) {
|
||||
cmd := exec.Command("git", args...)
|
||||
cmd.Dir = repoPath
|
||||
if out, err := cmd.CombinedOutput(); err != nil {
|
||||
t.Fatalf("git %v failed: %v\n%s", args, err, out)
|
||||
}
|
||||
}
|
||||
|
||||
runGit("init", "-b", "main", "--object-format=sha256")
|
||||
runGit("config", "user.email", "test@example.com")
|
||||
runGit("config", "user.name", "test")
|
||||
|
||||
// Create a directory and a file
|
||||
err = os.Mkdir(filepath.Join(repoPath, "subdir"), 0755)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
err = os.WriteFile(filepath.Join(repoPath, "subdir", "file.txt"), []byte("hello"), 0644)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
runGit("add", "subdir/file.txt")
|
||||
runGit("commit", "-m", "add subdir")
|
||||
|
||||
// Now create the handler
|
||||
g, err := gh.CreateGitHandler(org)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Call GitDirectoryList
|
||||
dirs, err := g.GitDirectoryList(repoName, "HEAD")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Logf("Directories found: %v", dirs)
|
||||
|
||||
if len(dirs) == 0 {
|
||||
t.Error("No directories found, but 'subdir' should be there")
|
||||
}
|
||||
if _, ok := dirs["subdir"]; !ok {
|
||||
t.Errorf("Expected 'subdir' in directory list, got %v", dirs)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitDeadlockFix(t *testing.T) {
|
||||
gitDir := t.TempDir()
|
||||
testDir, _ := os.Getwd()
|
||||
|
||||
cmd := exec.Command("/usr/bin/bash", path.Join(testDir, "tsetup.sh"))
|
||||
cmd.Dir = gitDir
|
||||
_, err := cmd.CombinedOutput()
|
||||
|
||||
gh, err := AllocateGitWorkTree(gitDir, "Test", "test@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
h, err := gh.ReadExistingPath(".")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer h.Close()
|
||||
|
||||
// Use a blob ID to trigger error in GitParseCommits
|
||||
// This ensures that the function returns error immediately and doesn't deadlock
|
||||
blobId := "81aba862107f1e2f5312e165453955485f424612f313d6c2fb1b31fef9f82a14"
|
||||
|
||||
done := make(chan error)
|
||||
go func() {
|
||||
_, err := h.GitParseCommits("", []string{blobId})
|
||||
done <- err
|
||||
}()
|
||||
|
||||
select {
|
||||
case err := <-done:
|
||||
if err == nil {
|
||||
t.Error("Expected error from GitParseCommits with blob ID, got nil")
|
||||
} else {
|
||||
// This is expected
|
||||
t.Logf("Got expected error: %v", err)
|
||||
}
|
||||
case <-time.After(2 * time.Second):
|
||||
t.Fatal("GitParseCommits deadlocked! Fix is NOT working.")
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user