openSUSE-release-tools/publish_distro
Lubos Kocman 7edf9254dc publish_distro add --now to set PUBLISH_DELAY=0
* we have to set it to 0 every single time
  when we're doing some debugging on pontifex
  and such an approach leaves a dirty checkout
2022-11-29 14:46:32 +01:00

#!/bin/bash
#
# The script is executed periodically by a cronjob
# as part of the ~mirror/bin/publish_factory_leap wrapper on pontifex.
#
# The tool publishes data from stage (/srv/ftp-stage) to public (/srv/ftp, i.e. download.opensuse.org).
#
# Configuration lives in openSUSE-release-tools.git/publish_distro_conf
# Syntax: ./publish_distro config_file [--force] [--dry] [--cleanup] [--now]
#
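# Example invocation (the config name below is illustrative; real configs
# live in publish_distro_conf/):
#   ./publish_distro publish_distro_conf/tumbleweed.config --dry
# Add --now to skip the PUBLISH_DELAY wait.
#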
# no blanket rsync --delete built in; deletions are handled per target below
# number of hours to delay publishing when new isos arrive
PUBLISH_DELAY=2
# force publish even if isos appear to be synced already
force=
# used for cleanup
date='now'
# set to echo for dry run
dryrun=
# cleanup instead of sync
do_cleanup=
# url with snapshot changes app
diff_url_base="https://openqa.opensuse.org/snapshot-changes/opensuse"
changes_dir_base="/srv/www-local/snapshot-changes"
repos=("oss" "non-oss")
extra_repos=("source" "debug")
isodir="iso"
# These hooks are placeholders; the sourced config file is expected to
# override each of them (they abort the script if the config does not).
get_version() {
    exit 1
}
get_iso() {
    exit 1
}
get_iso_link() {
    exit 1
}
get_diff_url() {
    exit 1
}
get_mark_published_url() {
    exit 1
}
get_changes_filename() {
    exit 1
}
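# A rough sketch of what a config file provides (every value below is
# illustrative, not copied from a real config):
#
#   path="tumbleweed"                  # subdirectory under /srv/ftp-stage/pub/opensuse/
#   flavors=("dvd")
#   synclog="/var/log/publish_distro/sync.log"                        # hypothetical path
#   deletelog="/var/log/publish_distro/$(date +%F)-deletes.log"       # --cleanup globs *-deletes.log
#   get_version()            { version="20221129"; }                  # placeholder, derived from staged media
#   get_iso()                { iso="openSUSE-Tumbleweed-DVD-x86_64-Snapshot$version-Media.iso"; }
#   get_iso_link()           { link="$stage/$isodir/openSUSE-Tumbleweed-DVD-x86_64-Current.iso"; }
#   get_diff_url()           { url="$diff_url_base/tumbleweed/$version"; }
#   get_changes_filename()   { changes="Changes.$version"; }
#   get_mark_published_url() { url=""; }                              # empty skips the final POST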
# --dry, --cleanup and --now should not be passed to rsync through $@;
# the config file and other options, including --force, need to be kept
for arg do
    shift
    if [ "$arg" == '--force' ]; then
        force=1
        newargs+=("$arg")
        shift
        continue
    elif [ "$arg" == '--dry' ]; then
        dryrun=echo
        shift
        continue
    elif [ "$arg" == '--cleanup' ]; then
        do_cleanup=1
        date="90 days ago"
        continue
    elif [ "$arg" == '--now' ]; then
        PUBLISH_DELAY=0
        shift
        continue
    else
        newargs+=("$arg")
    fi
done
# set newargs as $@
set -- "${newargs[@]}"
. "$1" || { echo "need to specify config file" >&2; exit 1; }
shift
stage="/srv/ftp-stage/pub/opensuse/$path"
dest="/srv/ftp/pub/opensuse/$path"
if [ ! -e "$stage" -o ! -e "$dest" ]; then
    echo "stage ($stage) and/or dest ($dest) doesn't exist" >&2
    exit 1
fi
if [ -n "$do_cleanup" ]; then
    # find newest delete log of that day
    deletelog=$(ls -1tr ${deletelog%/*}/*-deletes.log | tail -1)
    test -n "$deletelog" || exit 1
    (
    df -h /srv
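    # $deletelog was written by an earlier run's dry-run "--delete -n" repo
    # sync, so the awk pulls the path out of rsync's "*deleting   <path>" lines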
    for i in $(awk '/\*deleting .*/ { print $2}' $deletelog ); do
        if [ -e "$stage/$i" ]; then
            echo "WARNING: $i still exists in stage, not deleting"
        else
            $dryrun rm -rvf "$dest/$i"
        fi
    done
    df -h /srv
    ) > $deletelog-deleted
    exit 0
fi
mkdir -p "${synclog%/*}" "${deletelog%/*}"
# rotate pre-existing logs out of the way ("old" renames a file by appending a date suffix)
if test -f $synclog; then
    old $synclog
fi
if test -f $deletelog; then
    old $deletelog
fi
version=
do_sync_isos=1
for flavor in "${flavors[@]}"; do
    get_version
    get_iso
    get_diff_url
    get_changes_filename
    if [ -z "$force" -a -e "$dest/$isodir/$iso" ]; then
        if [ -t 1 ]; then # only log to tty
            echo "$iso already published, skipping isos"
        fi
        do_sync_isos=0
        break
    fi
    if [ -n "$changes" ]; then
        if [ -d "$stage/$changes" ]; then
            # new way, use the obs generated changelogs
            $dryrun rsync -avvhiH $stage/$changes \
                --link-dest=$stage \
                $dest/$changes $dry_delete "$@" \
                | LC_ALL=C grep -v '^\.[fdL] ' \
                | LC_ALL=C grep -v '^\(sending\|delta\)' \
                | tee -a $synclog
        else
            # old way (already broken?)
            if [ ! -s "$changes" ]; then
                echo "generating $changes" | tee -a $synclog
                [ -z "$dryrun" ] || changes=/dev/stdout
                $dryrun curl -sf "$url" > $changes
            fi
            if [ ! -e $stage/$isodir/$changes ]; then
                $dryrun cp -v $changes $stage/$isodir | tee -a $synclog
                [ -e $stage/$isodir/Changes ] || $dryrun ln -sf . $stage/$isodir/Changes 2>&1 | tee -a $synclog
            fi
        fi
    fi
    if [ ! -e "$stage/$isodir/$iso" ]; then
        echo "$flavor with $version doesn't exist, skipping isos" | tee -a $synclog
        do_sync_isos=0
        break
    else
        get_iso_link
        if [ "`readlink $link`" != "$iso" ]; then
            $dryrun ln -sf "$iso" "$link"
            $dryrun ln -sf "$iso.sha256" "$link.sha256"
        fi
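        # hold back publishing while the staged ISO is younger than
        # $PUBLISH_DELAY hours (its ctime is compared below); --now or
        # --force skip this wait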
        if [ -z "$force" ]; then
            if test `date -d "$PUBLISH_DELAY hours ago" +%s` -lt `stat -c "%Z" "$stage/$isodir/$iso"`; then
                echo "$iso was created less than $PUBLISH_DELAY hours ago, delay publishing" | tee -a $synclog
                do_sync_isos=0
            fi
        fi
    fi
done
# produce directories stamped with build number
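# e.g. (illustrative values) a media.1/build entry such as
# "openSUSE-20221129-x86_64-Build123.4" yields stamp "123.4", so /boot is
# moved to "123.4-boot" and replaced by a "boot" symlink pointing at it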
for r in "${repos[@]/#//repo/}"; do
    for i in /suse/setup /boot; do
        d="$stage$r$i"
        [ -d "$d" ] || continue
        if [ -e "$stage$r/media.1/build" ]; then
            read build < "$stage$r/media.1/build"
            stamp="${build#*Build}"
            if [ "$build" = "$stamp" ]; then
                echo "ERROR: build id not parsable: $build. not syncing"
                do_sync_isos=0
                break 2
            fi
        elif [ -e "$stage$r/media.1/media" ]; then
            { read dummy; read build; } < "$stage$r/media.1/media"
            stamp="${build#*Build}"
            if [ "$build" = "$stamp" ]; then
                echo "ERROR: build id not parsable: $build. not syncing"
                do_sync_isos=0
                break 2
            fi
        else
            echo "ERROR: repo $r is missing build id, not syncing"
            do_sync_isos=0
            break 2
        fi
        stamped="$stage$r${i%/*}/$stamp-${i##*/}"
        if [ ! -L "$d" ]; then
            $dryrun mv "$d" "$stamped"
            $dryrun ln -s "${stamped##*/}" "$d"
            echo "current $stamp" | $dryrun tee $stage/$r/.current.txt
        fi
    done
done
if [ "$do_sync_isos" = 0 ]; then
if [ -t 1 ]; then
echo "nothing to do"
fi
exit 0
fi
# scan mirrors before making the files visible
#mb scan -q -a -d /tumbleweed
TODO=()
#if [ "$do_sync_isos" = 1 ]; then
TODO+=(iso)
echo "current $version" | $dryrun tee $stage/$isodir/.current.txt
#fi
if [ -n "$repos" ]; then
    TODO+=(repo)
    TODO+=(DELETE_repo)
fi
# a single combined grep -v regexp performs awfully badly here (takes at
# least 30x as long), although LC_ALL=C alone might fix that, too
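# the two greps strip rsync noise from the log: itemize lines starting with
# ".f"/".d"/".L" (entries rsync is not transferring) and status lines
# beginning with "sending" or "delta"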
for i in "${TODO[@]}"; do
case $i in
DELETE_*)
i=${i#DELETE_}
log=$deletelog
dry_delete="--delete -n"
;;
iso) # ISOs we sync with delete
log=$synclog
dry_delete="--delete-after"
i="$isodir"
;;
*)
log=$synclog
dry_delete=""
;;
esac
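    # --link-dest lets rsync hard-link files in $dest to the identical copy
    # under $stage instead of transferring the data again; -H additionally
    # preserves hard links that already exist within the tree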
    echo ========== $i $dry_delete ===========
    $dryrun rsync -avvhiH $stage/$i \
        --link-dest=$stage \
        $dest $dry_delete "$@" \
        | LC_ALL=C grep -v '^\.[fdL] ' \
        | LC_ALL=C grep -v '^\(sending\|delta\)' \
        | tee -a $log
done
# also sync source and debug, which are not bind mounted for Leap
for r in "${extra_repos[@]}"; do
    stage="/srv/ftp-stage/pub/opensuse/$r/$path"
    dest="/srv/ftp/pub/opensuse/$r/$path"
    $dryrun rsync -avhiH $stage/ --link-dest=$stage \
        $dest --delete-after "$@" | tee -a $synclog
done
# mark published
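# get_mark_published_url (from the config) is expected to set $url;
# when it stays empty, no notification is POSTed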
get_mark_published_url
if [ -n "$url" ]; then
    $dryrun curl -X POST -F "version=$version" "$url"
fi
#echo creating hashes
#metalink-hasher update -t /srv/metalink-hashes/srv/ftp/pub/opensuse/tumbleweed \
# -b /srv/ftp/pub/opensuse/tumbleweed \
# /srv/ftp/pub/opensuse/tumbleweed
# mb makehashes is not needed by mirrorcache (which replaced mirrorbrain)
#$dryrun /usr/bin/mb makehashes -b /srv/ftp/pub/opensuse \
# -t /srv/metalink-hashes/srv/ftp/pub/opensuse \
# $dest