from __future__ import print_function

import glob
import gzip
import hashlib
import io
import logging
import os.path
import random
import re
import shutil
import string
import subprocess
import sys
import tempfile

from lxml import etree as ET

from osc import conf
import osc.core

from osclib.cache_manager import CacheManager

import requests

import solv

import yaml

try:
    from urllib.parse import urljoin, urlparse
except ImportError:
    # python 2.x
    from urlparse import urljoin, urlparse

logger = logging.getLogger()


def dump_solv_build(baseurl):
    """Determine repo format and build string from remote repository."""
    buildre = re.compile(r'.*-Build(.*)')
    factoryre = re.compile(r'openSUSE-(\d*)-i586-x86_64-Build.*')

    url = urljoin(baseurl, 'media.1/media')
    with requests.get(url) as media:
        if media.status_code == requests.codes.ok:
            for i, line in enumerate(media.iter_lines()):
                # only the second line of media.1/media carries the build id
                if i != 1:
                    continue
                # iter_lines() yields bytes on python 3
                line = line.decode('utf-8')
                build = factoryre.match(line)
                if build:
                    return build.group(1)
                build = buildre.match(line)
                if build:
                    return build.group(1)

    url = urljoin(baseurl, 'media.1/build')
    with requests.get(url) as build:
        if build.status_code == requests.codes.ok:
            name = build.content.strip().decode('utf-8')
            build = buildre.match(name)
            if build:
                return build.group(1)

    url = urljoin(baseurl, 'repodata/repomd.xml')
    with requests.get(url) as media:
        if media.status_code == requests.codes.ok:
            # parse the response we already have instead of fetching the url again
            root = ET.fromstring(media.content)
            rev = root.find('.//{http://linux.duke.edu/metadata/repo}revision')
            if rev is not None:
                return rev.text

    raise Exception(baseurl + ' includes no build number')


def parse_repomd(repo, baseurl):
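    """Load rpm-md metadata from a remote repository into a solv repo.

    Fetches repodata/repomd.xml from baseurl, locates the primary metadata
    file, verifies its sha256 checksum and feeds both files to libsolv.
    Returns True on success, False if the repository provides no repomd.xml.
    """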
    url = urljoin(baseurl, 'repodata/repomd.xml')
    repomd = requests.get(url)
    if repomd.status_code != requests.codes.ok:
        return False

    ns = {'r': 'http://linux.duke.edu/metadata/repo'}
    root = ET.fromstring(repomd.content)
    primary_element = root.find('.//r:data[@type="primary"]', ns)
    location = primary_element.find('r:location', ns).get('href')
    sha256_expected = primary_element.find('r:checksum[@type="sha256"]', ns).text

    f = tempfile.TemporaryFile()
    f.write(repomd.content)
    f.flush()
    os.lseek(f.fileno(), 0, os.SEEK_SET)
    # hand libsolv its own file handle - plain python file objects are only
    # accepted by the python 2 bindings
    repo.add_repomdxml(solv.xfopen_fd(None, f.fileno()), 0)

    url = urljoin(baseurl, location)
    with requests.get(url, stream=True) as primary:
        if primary.status_code != requests.codes.ok:
            raise Exception(url + ' does not exist')

        sha256 = hashlib.sha256(primary.content).hexdigest()
        if sha256 != sha256_expected:
            raise Exception('checksums do not match {} != {}'.format(sha256, sha256_expected))

        content = gzip.GzipFile(fileobj=io.BytesIO(primary.content))
        # fresh temp file so no stale repomd bytes can trail the primary data
        f = tempfile.TemporaryFile()
        f.write(content.read())
        f.flush()
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_rpmmd(solv.xfopen_fd(None, f.fileno()), None, 0)
        return True


def parse_susetags(repo, baseurl):
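    """Load SUSE-tags style metadata from a remote repository into a solv repo.

    Fetches the 'content' file from baseurl, then packages.gz from the
    description directory it names (falling back to suse/setup/descr).
    Returns True on success, False if the repository has no 'content' file.
    """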
    url = urljoin(baseurl, 'content')
    content = requests.get(url)
    if content.status_code != requests.codes.ok:
        return False

    f = tempfile.TemporaryFile()
    f.write(content.content)
    f.flush()
    os.lseek(f.fileno(), 0, os.SEEK_SET)
    repo.add_content(solv.xfopen_fd(None, f.fileno()), 0)

    defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
    descrdir = repo.meta.lookup_str(solv.SUSETAGS_DESCRDIR)
    if not descrdir:
        descrdir = 'suse/setup/descr'

    url = urljoin(baseurl, descrdir + '/packages.gz')
    with requests.get(url, stream=True) as packages:
        if packages.status_code != requests.codes.ok:
            raise Exception(url + ' does not exist')

        content = gzip.GzipFile(fileobj=io.BytesIO(packages.content))
        # fresh temp file so no stale 'content' bytes can trail the package data
        f = tempfile.TemporaryFile()
        f.write(content.read())
        f.flush()
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None,
                          solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)
        return True


def dump_solv(name, baseurl):
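    """Dump the metadata of a remote repository into a binary .solv file.

    Tries rpm-md first, then SUSE-tags, and writes the result to name.
    """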
    pool = solv.Pool()
    pool.setarch()

    # random repo name to avoid any collision inside the pool
    repo = pool.add_repo(''.join(random.choice(string.ascii_letters) for _ in range(5)))
    if not parse_repomd(repo, baseurl) and not parse_susetags(repo, baseurl):
        raise Exception('neither repomd nor susetags exists in ' + baseurl)

    repo.create_stubs()

    # .solv is a binary format, so write through a libsolv file handle on a
    # binary-mode file (mirrors the cache-writing pattern in pysolv)
    ofh = open(name, 'wb')
    sf = solv.xfopen_fd(None, ofh.fileno())
    repo.write(sf)
    sf.flush()
    ofh.close()

    return name


def print_repo_delta(pool, repo2, packages_file):
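    """Write a delta of repo2 against all other repos in the pool.

    Emits susetags-style records (=Pkg:, +Prv:/-Prv:) to packages_file for
    every non-source solvable of repo2 whose name/arch/evr is not already
    present in one of the previously loaded repos.
    """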
    print('=Ver: 2.0', file=packages_file)
    present = dict()
    for s in pool.solvables_iter():
        if s.repo != repo2:
            key = '{}/{}'.format(s.name, s.arch)
            present.setdefault(key, {})
            present[key][s.evr] = s.repo
    for s in repo2.solvables:
        if s.arch == 'src':
            continue
        key = '{}/{}'.format(s.name, s.arch)
        if present.get(key, {}).get(s.evr):
            continue
        if key not in present:
            print('# NEW', s.name, s.arch, file=packages_file)
        evr = s.evr.split('-')
        release = evr.pop()
        print('=Pkg:', s.name, '-'.join(evr), release, s.arch, file=packages_file)
        print('+Prv:', file=packages_file)
        for dep in s.lookup_deparray(solv.SOLVABLE_PROVIDES):
            print(dep, file=packages_file)
        print('-Prv:', file=packages_file)


def update_project(apiurl, project):
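    """Refresh the 000update-repos package of project from the live repos.

    Checks out 000update-repos, reads its config.yml, dumps each configured
    repository, computes the delta against the .packages.xz files already
    recorded for it, and uploads the xz-compressed result through the OBS
    source API.
    """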
    # Cache dir specific to hostname and project.
    host = urlparse(apiurl).hostname
    cache_dir = CacheManager.directory('update_repo_handler', host, project)
    repo_dir = os.path.join(cache_dir, '000update-repos')

    # development aid
    checkout = True
    if checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        osc.core.checkout_package(apiurl, project, '000update-repos', expand_link=True, prj_dir=cache_dir)

    with open(os.path.join(repo_dir, 'config.yml')) as config:
        root = yaml.safe_load(config)
    for item in root:
        # each item is a single-entry mapping; list(item) works on
        # python 2 and 3, while item.keys()[0] breaks on python 3
        key = list(item)[0]
        opts = item[key]
        # yaml parses 15.1 as a float - cast it back to a string
        key = str(key)
        if not opts['url'].endswith('/'):
            opts['url'] += '/'

        if opts.get('refresh', False):
            opts['build'] = dump_solv_build(opts['url'])
            path = '{}_{}.packages'.format(key, opts['build'])
        else:
            path = key + '.packages'
        packages_file = os.path.join(repo_dir, path)

        if os.path.exists(packages_file + '.xz'):
            print(path, 'already exists')
            continue

        solv_file = packages_file + '.solv'
        dump_solv(solv_file, opts['url'])

        pool = solv.Pool()
        pool.setarch()

        if opts.get('refresh', False):
            for old_file in glob.glob(os.path.join(repo_dir, '{}_*.packages.xz'.format(key))):
                repo = pool.add_repo(old_file)
                defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
                f = tempfile.TemporaryFile()
                # FIXME: port to lzma module with python3
                # check_call so a failing xz is not silently ignored
                subprocess.check_call(['xz', '-cd', old_file], stdout=f.fileno())
                os.lseek(f.fileno(), 0, os.SEEK_SET)
                repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None,
                                  solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)

        repo1 = pool.add_repo(''.join(random.choice(string.ascii_letters) for _ in range(5)))
        repo1.add_solv(solv_file)

        # close the delta file before xz reads it from disk
        with open(packages_file, 'w') as pf:
            print_repo_delta(pool, repo1, pf)
        subprocess.check_call(['xz', '-9', packages_file])
        os.unlink(solv_file)

        url = osc.core.makeurl(apiurl, ['source', project, '000update-repos', path + '.xz'])
        # the .xz payload is binary, so read it in binary mode
        osc.core.http_PUT(url, data=open(packages_file + '.xz', 'rb').read())

        del pool