#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 SUSE Linux GmbH
# Copyright (c) 2016 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
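
# This script crawls a source project (by default SUSE:SLE-12-SP2:Update) and
# creates submit requests against a target project (by default
# openSUSE:Leap:42.3) for packages whose sources differ.
# Example invocation (illustrative only; the script file name may differ):
#
#   python update_crawler.py --dry --only-from SUSE:SLE-12-SP2:Update yast2
#
# runs a dry crawl of the yast2 package; without package arguments the latest
# commits (or, with --all, the full package list) of the source project are
# checked.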

import argparse
import itertools
import logging
import sys
import urllib2
import time
from xml.etree import cElementTree as ET

import osc.conf
import osc.core
import rpm
import yaml
import re
from urllib import quote_plus

from osclib.memoize import memoize
from osclib.conf import Config
from osclib.stagingapi import StagingAPI

OPENSUSE = 'openSUSE:Leap:42.3'
FACTORY = 'openSUSE:Factory'
SLE = 'SUSE:SLE-12-SP2:Update'

makeurl = osc.core.makeurl
http_GET = osc.core.http_GET


# http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python
def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    for i in xrange(0, len(l), n):
        yield l[i:i+n]
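
# Note (illustrative, not from the original source): chunks() is used by
# UpdateCrawler.get_source_infos() below to query OBS source info in batches
# of 50 packages so the GET URLs stay reasonably short, e.g.
#   list(chunks(['a', 'b', 'c', 'd', 'e'], 2)) == [['a', 'b'], ['c', 'd'], ['e']]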


class UpdateCrawler(object):
    def __init__(self, from_prj, to_prj):
        self.from_prj = from_prj
        self.to_prj = to_prj
        self.apiurl = osc.conf.config['apiurl']
        self.debug = osc.conf.config['debug']
        self.filter_lookup = set()
        self.caching = False
        self.dryrun = False
        self.skipped = {}
        # submit_new is used as a boolean flag; it is set from --new in main()
        self.submit_new = False
        self.api = StagingAPI(
            osc.conf.config['apiurl'], project=to_prj)

        self.parse_lookup()

    # FIXME: duplicated from manager_42
    def latest_packages(self):
        apiurl = self.apiurl
        prj = self.from_prj
        if prj.startswith('openSUSE.org:'):
            apiurl = 'https://api.opensuse.org'
            prj = prj[len('openSUSE.org:'):]
        data = self.cached_GET(makeurl(apiurl,
                                       ['project', 'latest_commits', prj]))
        lc = ET.fromstring(data)
        packages = set()
        for entry in lc.findall('{http://www.w3.org/2005/Atom}entry'):
            title = entry.find('{http://www.w3.org/2005/Atom}title').text
            if title.startswith('In '):
                packages.add(title[3:].split(' ')[0])
        return sorted(packages)

    @memoize()
    def _cached_GET(self, url):
        return self.retried_GET(url).read()

    def cached_GET(self, url):
        if self.caching:
            return self._cached_GET(url)
        return self.retried_GET(url).read()

    def retried_GET(self, url):
        try:
            return http_GET(url)
        except urllib2.HTTPError as e:
            # retry server-side errors after a short pause
            if 500 <= e.code <= 599:
                print 'Retrying {}'.format(url)
                time.sleep(1)
                return self.retried_GET(url)
            raise e

    def get_project_meta(self, prj):
        url = makeurl(self.apiurl, ['source', prj, '_meta'])
        return self.cached_GET(url)

    def is_maintenance_project(self, prj):
        root = ET.fromstring(self.get_project_meta(prj))
        return root.get('kind', None) == 'maintenance_release'

    def _meta_get_packagelist(self, prj, deleted=None, expand=False):

        query = {}
        if deleted:
            query['deleted'] = 1
        if expand:
            query['expand'] = 1

        u = osc.core.makeurl(self.apiurl, ['source', prj], query)
        return self.cached_GET(u)

    def meta_get_packagelist(self, prj, deleted=None, expand=False):
        root = ET.fromstring(self._meta_get_packagelist(prj, deleted, expand))
        return [node.get('name') for node in root.findall('entry')
                if not node.get('name') == '_product'
                and not node.get('name').startswith('_product:')
                and not node.get('name').startswith('patchinfo.')]

    def _get_source_infos(self, project, packages):
        query = ['view=info']
        if packages:
            query += ['package=%s' % quote_plus(p) for p in packages]

        return self.cached_GET(makeurl(self.apiurl,
                                       ['source', project],
                                       query))

    def get_source_infos(self, project, packages):
        ret = dict()
        for pkg_chunks in chunks(sorted(packages), 50):
            root = ET.fromstring(self._get_source_infos(project, pkg_chunks))
            for package in root.findall('sourceinfo'):
                if package.findall('error'):
                    continue
                ret[package.get('package')] = package
        return ret

    def _get_source_package(self, project, package, revision):
        opts = {'view': 'info'}
        if revision:
            opts['rev'] = revision
        return self.cached_GET(makeurl(self.apiurl,
                                       ['source', project, package], opts))

    def _find_existing_request(self, src_project, src_package, rev, dst_project,
                               dst_package):
        """Check whether a submit request for this revision already exists."""
        states = ['new', 'review', 'declined', 'revoked', 'superseded']
        reqs = osc.core.get_exact_request_list(self.apiurl,
                                               src_project,
                                               dst_project,
                                               src_package,
                                               dst_package,
                                               req_type='submit',
                                               req_state=states)
        foundrev = False
        for r in reqs:
            for a in r.actions:
                srcrev = a.src_rev
                # sometimes requests only contain the decimal revision
                if re.match(r'^\d+$', srcrev) is not None:
                    xml = ET.fromstring(self._get_source_package(src_project, src_package, srcrev))
                    srcrev = xml.get('verifymd5')
                logging.debug('rev {}'.format(srcrev))
                if srcrev == rev:
                    logging.debug('{}: found existing request {}'.format(dst_package, r.reqid))
                    foundrev = True
        return foundrev

    def _submitrequest(self, src_project, src_package, rev, dst_project,
                       dst_package, msg):
        res = 0
        print "creating submit request", src_project, src_package, rev, dst_project, dst_package
        if not self.dryrun:
            res = osc.core.create_submit_request(self.apiurl,
                                                 src_project,
                                                 src_package,
                                                 dst_project,
                                                 dst_package,
                                                 orev=rev,
                                                 message=msg)
        return res

    def submitrequest(self, src_project, src_package, rev, dst_package, origin):
        """Create a submit request unless one for this revision already exists."""
        dst_project = self.to_prj
        msg = 'Automatic request from %s by UpdateCrawler' % src_project
        if not self._find_existing_request(src_project, src_package, rev, dst_project, dst_package):
            return self._submitrequest(src_project, src_package, rev, dst_project,
                                       dst_package, msg)
        return 0

    def is_source_innerlink(self, project, package):
        try:
            root = ET.fromstring(
                self.cached_GET(makeurl(self.apiurl,
                                        ['source', project, package, '_link']
                                        )))
            if root.get('project') is None and root.get('cicount'):
                return True
        except urllib2.HTTPError as err:
            # if the package has no _link file at all, it cannot be an inner link
            if err.code == 404:
                return False
            raise

    def parse_lookup(self):
        self.lookup = yaml.safe_load(self._load_lookup_file())

    def _load_lookup_file(self):
        prj = self.to_prj
        return self.cached_GET(makeurl(self.apiurl,
                                       ['source', prj, '00Meta', 'lookup.yml']))
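
    # The lookup file (<to_prj>/00Meta/lookup.yml) maps package names to the
    # project a package was last taken from; --only-from filters on these
    # values. Roughly (illustrative sketch, not actual content):
    #
    #   aaa_base: 'SUSE:SLE-12-SP2:Update'
    #   yast2: 'openSUSE:Factory'
    #   libfoo-doc: 'subpackage of libfoo'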

    def follow_link(self, project, package, rev, verifymd5):
        """Follow the link chain of a package and return the ultimate
        (project, package, rev), or None if the sources changed meanwhile."""
        # print "follow", project, package, rev
        # verify it's still the same package
        xml = ET.fromstring(self._get_source_package(project, package, rev))
        if xml.get('verifymd5') != verifymd5:
            return None
        xml = ET.fromstring(self.cached_GET(makeurl(self.apiurl,
                                                    ['source', project, package],
                                                    {'rev': rev})))
        linkinfo = xml.find('linkinfo')
        if linkinfo is not None:
            ret = self.follow_link(linkinfo.get('project'), linkinfo.get('package'), linkinfo.get('srcmd5'), verifymd5)
            if ret:
                project, package, rev = ret
        return (project, package, rev)
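
    # Overview (descriptive note, not from the original source): for every
    # package in `sources`, update_targets() decides whether to create a
    # submit request:
    #   - packages whose lookup origin is filtered out by --only-from are
    #     skipped (and recorded in self.skipped unless the origin is a
    #     'subpackage of' entry),
    #   - packages missing from the target are only considered with --new,
    #     and link-only subpackages are never submitted,
    #   - packages already in the target are skipped when their devel project
    #     is devel:languages:haskell, when source and target verifymd5 match,
    #     or when the target package is a link-only subpackage,
    #   - everything else is resolved via follow_link() and submitted.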

    def update_targets(self, targets, sources):

        # Special case for maintenance projects: only consider the main
        # package names; the link in the source project is followed later.
        if self.is_maintenance_project(self.from_prj):
            mainpacks = set()
            for package, sourceinfo in sources.items():
                if package.startswith('patchinfo.'):
                    continue
                files = set([node.text for node in sourceinfo.findall('filename')])
                if '{}.spec'.format(package) in files:
                    mainpacks.add(package)

            sources = {package: sourceinfo for package, sourceinfo in sources.iteritems() if package in mainpacks}

        for package, sourceinfo in sources.items():

            origin = self.lookup.get(package, '')
            if self.filter_lookup and origin not in self.filter_lookup:
                if not origin.startswith('subpackage of'):
                    self.skipped.setdefault(origin, set()).add(package)
                continue

            if package not in targets:
                if not self.submit_new:
                    logging.info('Package %s not found in targets' % (package))
                    continue

                if self.is_source_innerlink(self.from_prj, package):
                    logging.debug('Package %s is sub package' % (package))
                    continue

            else:
                targetinfo = targets[package]

                # XXX: make more generic :-)
                devel_prj = self.api.get_devel_project(FACTORY, package)
                if devel_prj == 'devel:languages:haskell':
                    logging.info('skipping haskell package %s' % package)
                    continue

                # Compare verifymd5
                md5_from = sourceinfo.get('verifymd5')
                md5_to = targetinfo.get('verifymd5')
                if md5_from == md5_to:
                    # logging.info('Package %s not marked for update' % package)
                    continue

                if self.is_source_innerlink(self.to_prj, package):
                    logging.debug('Package %s is sub package' % (package))
                    continue

            # this makes only sense if we look at the expanded view
            # and want to submit from proper project
            # originproject = default_origin
            # if not sourceinfo.find('originproject') is None:
            #     originproject = sourceinfo.find('originproject').text
            #     logging.warn('changed originproject for {} to {}'.format(package, originproject))

            src_project, src_package, src_rev = self.follow_link(self.from_prj, package,
                                                                 sourceinfo.get('srcmd5'),
                                                                 sourceinfo.get('verifymd5'))

            res = self.submitrequest(src_project, src_package, src_rev, package, origin)
            if res:
                logging.info('Created request %s for %s' % (res, package))
            elif res != 0:
                logging.error('Error creating the request for %s' % package)

    def crawl(self, packages):
        """Main method of the class that runs the crawler."""
        targets = self.get_source_infos(self.to_prj, packages)
        sources = self.get_source_infos(self.from_prj, packages)
        self.update_targets(targets, sources)


def main(args):
    # Configure OSC
    osc.conf.get_config(override_apiurl=args.apiurl)
    osc.conf.config['debug'] = args.osc_debug

    # initialize stagingapi config
    Config(args.to_prj)

    uc = UpdateCrawler(args.from_prj, args.to_prj)
    uc.caching = args.cache_requests
    uc.dryrun = args.dry
    uc.submit_new = args.new
    if args.only_from:
        for prj in args.only_from:
            uc.filter_lookup.add(prj)

    given_packages = args.packages
    if not given_packages:
        if args.all:
            given_packages = uc.meta_get_packagelist(args.from_prj)
        else:
            given_packages = uc.latest_packages()
    uc.crawl(given_packages)

    if uc.skipped:
        from pprint import pformat
        logging.debug("skipped packages: %s", pformat(uc.skipped))


if __name__ == '__main__':
    description = 'Create update SRs for Leap.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='print info useful for debugging')
    parser.add_argument('-a', '--all', action='store_true',
                        help='check all packages')
    parser.add_argument('-n', '--dry', action='store_true',
                        help='dry run, no POST, PUT, DELETE')
    parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
                        help='project to get the updates from (default: %s)' % SLE,
                        default=SLE)
    parser.add_argument('-t', '--to', dest='to_prj', metavar='PROJECT',
                        help='project to submit the updates to (default: %s)' % OPENSUSE,
                        default=OPENSUSE)
    parser.add_argument('--only-from', dest='only_from', metavar='PROJECT', action='append',
                        help='only submit packages that came from PROJECT')
    parser.add_argument("--osc-debug", action="store_true", help="osc debug output")
    parser.add_argument("--new", action="store_true", help="also submit new packages")
    parser.add_argument('--cache-requests', action='store_true', default=False,
                        help='cache GET requests. Not recommended for daily use.')
    parser.add_argument("packages", nargs='*', help="packages to check")

    args = parser.parse_args()

    # Set logging configuration
    logging.basicConfig(level=logging.DEBUG if args.debug
                        else logging.INFO)

    if args.dry:
        def dryrun(t, *args, **kwargs):
            return lambda *args, **kwargs: logging.debug("dryrun %s %s %s", t, args, str(kwargs)[:200])

        http_POST = dryrun('POST')
        http_PUT = dryrun('PUT')
        http_DELETE = dryrun('DELETE')

    sys.exit(main(args))

# vim: sw=4 et