Merge pull request #1655 from jberry-suse/pseudometa-container
osclib/core: provide project_pseudometa_*() functions (and related)
Commit d7d5724dae
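The change replaces direct use of the hard-coded `$PROJECT:Staging/dashboard` container with helpers that resolve a configurable pseudometa package. A minimal usage sketch of the new module-level API follows; the target project, file name, and apiurl handling are examples only, not prescribed by this change:

```python
# Sketch: resolving and using the pseudometa container via the new helpers.
# 'openSUSE:Factory' and 'devel_projects' are example values.
import osc.conf
from osclib.core import project_pseudometa_package
from osclib.core import project_pseudometa_file_load
from osclib.core import project_pseudometa_file_ensure

osc.conf.get_config()
apiurl = osc.conf.config['apiurl']

# Where does the pseudometa container live for this project?
prj, pkg = project_pseudometa_package(apiurl, 'openSUSE:Factory')

# Load a pseudometa file and write it back only if the content changed.
content = project_pseudometa_file_load(apiurl, 'openSUSE:Factory', 'devel_projects')
project_pseudometa_file_ensure(apiurl, 'openSUSE:Factory', 'devel_projects',
                               content or '', 'devel_projects write')
```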
@@ -73,11 +73,11 @@ def list(args):
 
     if args.write:
         api = staging_api(args)
-        api.dashboard_content_ensure('devel_projects', out, 'devel_projects write')
+        api.pseudometa_file_ensure('devel_projects', out, 'devel_projects write')
 
 def devel_projects_load(args):
     api = staging_api(args)
-    devel_projects = api.dashboard_content_load('devel_projects')
+    devel_projects = api.pseudometa_file_load('devel_projects')
 
     if devel_projects:
         return devel_projects.splitlines()
@@ -267,7 +267,7 @@ if __name__ == '__main__':
 
     parser_list = subparsers.add_parser('list', help='List devel projects.')
    parser_list.set_defaults(func=list)
-    parser_list.add_argument('-w', '--write', action='store_true', help='write to dashboard container package')
+    parser_list.add_argument('-w', '--write', action='store_true', help='write to pseudometa package')
 
    parser_maintainer = subparsers.add_parser('maintainer', help='Check for relevant groups as maintainer.')
    parser_maintainer.set_defaults(func=maintainer)
@@ -31,6 +31,7 @@ from xml.etree import cElementTree as ET
 import osc.conf
 import osc.core
 from osclib.core import devel_project_get
+from osclib.core import project_pseudometa_package
 
 from osc import oscerr
 from osclib.memoize import memoize
@@ -319,7 +320,8 @@ class FccSubmitter(object):
         target_packages = self.get_source_packages(self.to_prj)
         deleted_packages = self.get_deleted_packages(self.to_prj)
 
-        skip_pkgs_list = self.load_skip_pkgs_list('openSUSE:Factory:Staging', 'dashboard').splitlines()
+        pseudometa_project, pseudometa_package = project_pseudometa_package(self.apiurl, 'openSUSE:Factory')
+        skip_pkgs_list = self.load_skip_pkgs_list(pseudometa_project, pseudometa_package).splitlines()
 
         ms_packages = [] # collect multi specs packages
 
metrics.py (13 changed lines)
@@ -19,6 +19,7 @@ from osc.core import get_commitlog
 import osclib.conf
 from osclib.cache import Cache
 from osclib.conf import Config
+from osclib.core import project_pseudometa_package
 from osclib.stagingapi import StagingAPI
 
 SOURCE_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -354,9 +355,10 @@ def revision_index(api):
     if not hasattr(revision_index, 'index'):
         revision_index.index = {}
 
+        project, package = project_pseudometa_package(api.apiurl, api.project)
         try:
             root = ET.fromstringlist(
-                get_commitlog(api.apiurl, api.cstaging, 'dashboard', None, format='xml'))
+                get_commitlog(api.apiurl, project, package, None, format='xml'))
         except HTTPError as e:
             return revision_index.index
 
@@ -380,7 +382,7 @@ def dashboard_at(api, filename, datetime=None, revision=None):
     if not revision:
         return revision
 
-    content = api.dashboard_content_load(filename, revision)
+    content = api.pseudometa_file_load(filename, revision)
     if filename in ('ignored_requests'):
         if content:
             return yaml.safe_load(content)
@@ -530,6 +532,7 @@ def main(args):
     client = InfluxDBClient(args.host, args.port, args.user, args.password, args.project)
 
     osc.conf.get_config(override_apiurl=args.apiurl)
+    apiurl = osc.conf.config['apiurl']
     osc.conf.config['debug'] = args.debug
 
     # Ensure database exists.
@@ -540,16 +543,16 @@ def main(args):
         return
 
     # Use separate cache since it is persistent.
+    _, package = project_pseudometa_package(apiurl, args.project)
     Cache.CACHE_DIR = Cache.CACHE_DIR + '-metrics'
     if args.wipe_cache:
         Cache.delete_all()
     if args.heavy_cache:
         Cache.PATTERNS['/search/request'] = sys.maxint
-        Cache.PATTERNS['/source/[^/]+/dashboard/_history'] = sys.maxint
-        Cache.PATTERNS['/source/[^/]+/dashboard/[^/]+\?rev=.*'] = sys.maxint
+        Cache.PATTERNS['/source/[^/]+/{}/_history'.format(package)] = sys.maxint
+        Cache.PATTERNS['/source/[^/]+/{}/[^/]+\?rev=.*'.format(package)] = sys.maxint
     Cache.init()
 
-    apiurl = osc.conf.config['apiurl']
     Config(apiurl, args.project)
     api = StagingAPI(apiurl, args.project)
 
@@ -9,6 +9,7 @@ from osc.core import http_POST
 from osc.core import http_PUT
 from osc.core import makeurl
 from osc.core import show_upstream_rev
+from osclib.core import project_pseudometa_package
 from urllib2 import HTTPError
 import argparse
 import osc.conf
@@ -174,7 +175,8 @@ def clone_do(apiurl_source, apiurl_target, project):
     entity_clone(apiurl_source, apiurl_target, ['source', project + ':Rings:1-MinimalX', '_meta'],
                  clone=project_clone)
 
-    entity_clone(apiurl_source, apiurl_target, ['source', project + ':Staging', 'dashboard', '_meta'],
+    pseudometa_project, pseudometa_package = project_pseudometa_package(apiurl_source, project)
+    entity_clone(apiurl_source, apiurl_target, ['source', pseudometa_project, pseudometa_package, '_meta'],
                  clone=package_clone, after=package_clone_after)
 
     entity_clone(apiurl_source, apiurl_target, ['source', project, 'drush', '_meta'],
@@ -137,7 +137,7 @@ def clean_args(args):
 @cmdln.option('--try-strategies', action='store_true', default=False, help='apply strategies and keep any with desireable outcome')
 @cmdln.option('--strategy', help='apply a specific strategy')
 @cmdln.option('--no-color', action='store_true', help='strip colors from output (or add staging.color = 0 to the .oscrc general section')
-@cmdln.option('--save', action='store_true', help='save the result to the dashboard container')
+@cmdln.option('--save', action='store_true', help='save the result to the pseudometa package')
 @cmdln.option('--append', action='store_true', help='append to existing value')
 @cmdln.option('--clear', action='store_true', help='clear value')
 def do_staging(self, subcmd, opts, *args):
@@ -167,9 +167,9 @@ def do_staging(self, subcmd, opts, *args):
 
     "config" will modify or view staging specific configuration
 
-    Target project level configuration that applies to all stagings can be
-    found in the $PROJECT:Staging/dashboard container in file "config". Both
-    configuration locations follow the .oscrc format (space separated list).
+    Target project OSRT:Config attribute configuration applies to all
+    stagings. Both configuration locations follow the .oscrc format (space
+    separated list).
 
    config
        Print all staging configuration.
@@ -477,7 +477,7 @@ def do_staging(self, subcmd, opts, *args):
         # Is it safe to accept? Meaning: /totest contains what it should and is not dirty
         version_totest = api.get_binary_version(api.project, "openSUSE-release.rpm", repository="totest", arch="x86_64")
         if version_totest:
-            version_openqa = api.dashboard_content_load('version_totest')
+            version_openqa = api.pseudometa_file_load('version_totest')
             totest_dirty = api.is_repo_dirty(api.project, 'totest')
             print("version_openqa: %s / version_totest: %s / totest_dirty: %s\n" % (version_openqa, version_totest, totest_dirty))
         else:
@@ -491,7 +491,7 @@ def do_staging(self, subcmd, opts, *args):
             version_openqa = version_totest
             totest_dirty = False
         else:
-            version_openqa = api.dashboard_content_load('version_totest')
+            version_openqa = api.pseudometa_file_load('version_totest')
             totest_dirty = api.is_repo_dirty(api.project, 'totest')
 
         if version_openqa == version_totest and not totest_dirty:
@@ -13,6 +13,8 @@ from osc.core import change_request_state, show_package_meta, wipebinaries
 from osc.core import http_GET, http_PUT, http_DELETE, http_POST
 from osc.core import delete_package, search, set_devel_project
 from osclib.config_command import ConfigCommand
+from osclib.core import source_file_load
+from osclib.core import source_file_save
 from datetime import date
 
 
@@ -73,7 +75,7 @@ class AcceptCommand(object):
         return rqs
 
     def reset_rebuild_data(self, project):
-        data = self.api.dashboard_content_load('support_pkg_rebuild')
+        data = self.api.pseudometa_file_load('support_pkg_rebuild')
         if data is None:
             return
 
@@ -87,7 +89,7 @@ class AcceptCommand(object):
         # supportpkg list
         content = ET.tostring(root)
         if content != data:
-            self.api.dashboard_content_save('support_pkg_rebuild', content, 'accept command update')
+            self.api.pseudometa_file_save('support_pkg_rebuild', content, 'accept command update')
 
     def virtually_accept_delete(self, request_id, package):
         self.api.add_review(request_id, by_group=self.api.cdelreq_review, msg='Request accepted. Cleanup in progress - DO NOT REVOKE!')
@@ -171,10 +173,10 @@ class AcceptCommand(object):
         # intend to break the kiwi file
         arch = package.split('-')[-1]
         fakepkgname = 'I-am-breaks-kiwi-build'
-        oldkiwifile = self.api.load_file_content(project, package, 'PRODUCT-'+arch+'.kiwi')
+        oldkiwifile = source_file_load(self.api.apiurl, project, package, 'PRODUCT-'+arch+'.kiwi')
         if oldkiwifile is not None:
             newkiwifile = re.sub(r'<repopackage name="openSUSE-release"/>', '<repopackage name="%s"/>' % fakepkgname, oldkiwifile)
-            self.api.save_file_content(project, package, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
+            source_file_save(self.api.apiurl, project, package, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
 
         # do wipe binary now
         query = { 'cmd': 'wipe' }
@@ -250,7 +252,7 @@ class AcceptCommand(object):
 
         if len(filelist) > 1:
             # There is more than one .spec file in the package; link package containers as needed
-            origmeta = self.api.load_file_content(project, pkgname, '_meta')
+            origmeta = source_file_load(self.api.apiurl, project, pkgname, '_meta')
             for specfile in filelist:
                 package = specfile[:-5] # stripping .spec off the filename gives the packagename
                 if package == pkgname:
@@ -272,9 +274,9 @@ class AcceptCommand(object):
                 newmeta = re.sub(r'</package>',
                                  r'<bcntsynctag>{}</bcntsynctag></package>'.format(pkgname),
                                  newmeta)
-                self.api.save_file_content(project, package, '_meta', newmeta)
+                source_file_save(self.api.apiurl, project, package, '_meta', newmeta)
                 link = "<link package=\"{}\" cicount=\"copy\" />".format(pkgname)
-                self.api.save_file_content(project, package, '_link', link)
+                source_file_save(self.api.apiurl, project, package, '_link', link)
             return True
 
     def update_factory_version(self):
@@ -37,10 +37,6 @@ class CheckDuplicateBinariesCommand(object):
 
         current = yaml.dump(duplicates, default_flow_style=False)
         if save:
-            args = ['{}:Staging'.format(self.api.project), 'dashboard', 'duplicate_binaries']
-            previous = self.api.load_file_content(*args)
-            if current != previous:
-                args.append(current)
-                self.api.save_file_content(*args)
+            self.api.pseudometa_file_ensure('duplicate_binaries', current)
         else:
             print(current)
@@ -24,7 +24,6 @@ import operator
 import re
 
 from osc import conf
-from osclib.core import attribute_value_load
 from osclib.memoize import memoize
 
 
@@ -51,6 +50,7 @@ DEFAULT = {
        'lock-ns': 'openSUSE',
        'delreq-review': 'factory-maintainers',
        'main-repo': 'standard',
+        'pseudometa_package': 'openSUSE:%(project)s:Staging/dashboard',
        'download-baseurl': 'http://download.opensuse.org/tumbleweed/',
        # check_source.py
        'check-source-single-action-require': 'True',
@@ -80,6 +80,7 @@ DEFAULT = {
        'leaper-override-group': 'leap-reviewers',
        'delreq-review': None,
        'main-repo': 'standard',
+        'pseudometa_package': 'openSUSE:%(project)s:Staging/dashboard',
        'download-baseurl': 'http://download.opensuse.org/distribution/leap/%(version)s/',
        'download-baseurl-update': 'http://download.opensuse.org/update/leap/%(version)s/',
        'check-source-add-review-team': 'False',
@@ -246,6 +247,7 @@ class Config(object):
         return defaults
 
     def fetch_remote(self, apiurl):
+        from osclib.core import attribute_value_load
         config = attribute_value_load(apiurl, self.project, 'Config')
         if config:
             cp = ConfigParser()
@@ -2,6 +2,7 @@ from collections import namedtuple
 from datetime import datetime
 from dateutil.parser import parse as date_parse
 import re
+import socket
 from xml.etree import cElementTree as ET
 from lxml import etree as ETL
 
@@ -15,11 +16,13 @@ from osc.core import get_binarylist
 from osc.core import get_dependson
 from osc.core import http_GET
 from osc.core import http_POST
+from osc.core import http_PUT
 from osc.core import makeurl
 from osc.core import owner
 from osc.core import Request
 from osc.core import show_package_meta
 from osc.core import show_project_meta
+from osclib.conf import Config
 from osclib.memoize import memoize
 
 BINARY_REGEX = r'(?:.*::)?(?P<filename>(?P<name>.*)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^-\.]+))'
@@ -268,6 +271,35 @@ def source_file_load(apiurl, project, package, filename, revision=None):
     except HTTPError:
         return None
 
+def source_file_save(apiurl, project, package, filename, content, comment=None):
+    if not comment:
+        comment = 'update by OSRT tools'
+    comment += ' (host {})'.format(socket.gethostname())
+
+    url = makeurl(apiurl, ['source', project, package, filename], {'comment': comment})
+    http_PUT(url, data=content)
+
+def project_pseudometa_package(apiurl, project):
+    package = Config.get(apiurl, project).get('pseudometa_package', '00Meta')
+    if '/' in package:
+        project, package = package.split('/', 2)
+
+    return project, package
+
+def project_pseudometa_file_load(apiurl, project, filename, revision=None):
+    project, package = project_pseudometa_package(apiurl, project)
+    return source_file_load(apiurl, project, package, filename, revision)
+
+def project_pseudometa_file_save(apiurl, project, filename, content, comment=None):
+    project, package = project_pseudometa_package(apiurl, project)
+    source_file_save(apiurl, project, package, filename, content, comment)
+
+def project_pseudometa_file_ensure(apiurl, project, filename, content, comment=None):
+    project_pseudometa, package = project_pseudometa_package(apiurl, project)
+
+    if content != project_pseudometa_file_load(apiurl, project_pseudometa, filename):
+        project_pseudometa_file_save(apiurl, project, filename, content, comment)
+
 # Should be an API call that says give me "real" packages that does not include
 # multibuild entries nor linked packages.
 def package_list_without_links(apiurl, project):
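The default value added in osclib/conf.py ('openSUSE:%(project)s:Staging/dashboard') contains a '/', so project_pseudometa_package() splits it into a separate container project and package; a value without '/' (such as the '00Meta' fallback) keeps the package inside the target project itself. A small sketch of that resolution logic, with example project names assumed for illustration:

```python
# Sketch of the split performed by project_pseudometa_package(), using
# example inputs; the real helper reads the value from the project config.
def resolve(project, configured='00Meta'):
    package = configured
    if '/' in package:
        project, package = package.split('/', 2)
    return project, package

# With the Factory-style default, assuming %(project)s expands to 'Factory':
assert resolve('openSUSE:Factory',
               'openSUSE:Factory:Staging/dashboard') == ('openSUSE:Factory:Staging', 'dashboard')
# Without '/', the fallback package lives in the project itself:
assert resolve('example:Project') == ('example:Project', '00Meta')
```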
@@ -15,6 +15,8 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 from osclib.config_command import ConfigCommand
+from osclib.core import source_file_load
+from osclib.core import source_file_save
 import time
 import re
 from xml.etree import cElementTree as ET
@@ -170,12 +172,12 @@ class FreezeCommand(object):
         if not self.api.item_exists(project, product):
             return None
 
-        kiwifile = self.api.load_file_content(project, product, 'PRODUCT-'+arch+'.kiwi')
+        kiwifile = source_file_load(self.api.apiurl, project, product, 'PRODUCT-'+arch+'.kiwi')
 
         tmpkiwifile = re.sub(r'<productinfo name="VERSION">.*</productinfo>', '<productinfo name="VERSION">%s</productinfo>' % version, kiwifile)
         newkiwifile = re.sub(r'<productvar name="VERSION">.*</productvar>', '<productvar name="VERSION">%s</productvar>' % version, tmpkiwifile)
 
-        self.api.save_file_content(project, product, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
+        source_file_save(self.api.apiurl, project, product, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
 
     def prj_meta_for_bootstrap_copy(self, prj):
         root = ET.Element('project', {'name': prj})
@@ -57,6 +57,9 @@ from osc.core import streamfile
 from osclib.cache import Cache
 from osclib.core import devel_project_get
 from osclib.core import project_list_prefix
+from osclib.core import project_pseudometa_file_load
+from osclib.core import project_pseudometa_file_save
+from osclib.core import project_pseudometa_file_ensure
 from osclib.core import source_file_load
 from osclib.comments import CommentAPI
 from osclib.ignore_command import IgnoreCommand
@@ -626,14 +629,14 @@ class StagingAPI(object):
 
     @memoize(session=True)
     def get_ignored_requests(self):
-        ignore = self.dashboard_content_load('ignored_requests')
+        ignore = self.pseudometa_file_load('ignored_requests')
         if ignore is None or not ignore:
             return {}
         return yaml.safe_load(ignore)
 
     def set_ignored_requests(self, ignore_requests):
         ignore = yaml.dump(ignore_requests, default_flow_style=False)
-        self.dashboard_content_ensure('ignored_requests', ignore)
+        self.pseudometa_file_ensure('ignored_requests', ignore)
 
     @memoize(session=True, add_invalidate=True)
     def get_open_requests(self, query_extra=None):
@@ -1241,8 +1244,8 @@ class StagingAPI(object):
         # If adi project, check for baselibs.conf in all specs to catch both
         # dynamically generated and static baselibs.conf.
         baselibs = False if self.is_adi_project(project) else None
-        if baselibs is False and 'baselibs.conf' in str(self.load_file_content(
-                src_prj, src_pkg, '{}.spec'.format(src_pkg), src_rev)):
+        if baselibs is False and 'baselibs.conf' in str(source_file_load(
+                self.apiurl, src_prj, src_pkg, '{}.spec'.format(src_pkg), src_rev)):
             baselibs = True
 
         for sub_prj, sub_pkg in self.get_sub_packages(tar_pkg, project):
@@ -1258,8 +1261,8 @@ class StagingAPI(object):
             url = self.makeurl(['source', sub_prj, sub_pkg, '_link'])
             http_PUT(url, data=ET.tostring(root))
 
-            if baselibs is False and 'baselibs.conf' in str(self.load_file_content(
-                    src_prj, src_pkg, '{}.spec'.format(sub_pkg), src_rev)):
+            if baselibs is False and 'baselibs.conf' in str(source_file_load(
+                    self.apiurl, src_prj, src_pkg, '{}.spec'.format(sub_pkg), src_rev)):
                 baselibs = True
 
         if baselibs:
@@ -1464,7 +1467,7 @@ class StagingAPI(object):
 
         version = None
 
-        specfile = self.load_file_content(project, package, '{}.spec'.format(package))
+        specfile = source_file_load(self.apiurl, project, package, '{}.spec'.format(package))
         if specfile:
             try:
                 version = re.findall('^Version:(.*)', specfile, re.MULTILINE)[0].strip()
@@ -1484,36 +1487,14 @@ class StagingAPI(object):
                 return None
             raise
 
-    def load_file_content(self, project, package, filename, revision=None):
-        """
-        Load the content of a file and return the content as data. If the package is a link, it will be expanded
-        :param project: The project to query
-        :param package: The package to quert
-        :param filename: The filename to query
-        :param revision: The revision to query
-        """
-        return source_file_load(self.apiurl, project, package, filename, revision)
-
-    def save_file_content(self, project, package, filename, content, comment='script updated'):
-        """
-        Save content to a project/package/file
-        :param project: The project containing the package
-        :param package: the package to update
-        :param filename: the filename to save the data to
-        :param content: the content to write to the file
-        """
-        url = self.makeurl(['source', project, package, filename], {'comment': comment})
-        http_PUT(url, data=content)
-
-    def dashboard_content_load(self, filename, revision=None):
-        return self.load_file_content(self.cstaging, 'dashboard', filename, revision)
-
-    def dashboard_content_save(self, filename, content, comment='script updated'):
-        return self.save_file_content(self.cstaging, 'dashboard', filename, content, comment)
-
-    def dashboard_content_ensure(self, filename, content, comment='script updated'):
-        if content != self.dashboard_content_load(filename):
-            self.dashboard_content_save(filename, content, comment)
+    def pseudometa_file_load(self, filename, revision=None):
+        return project_pseudometa_file_load(self.apiurl, self.project, filename, revision)
+
+    def pseudometa_file_save(self, filename, content, comment=None):
+        project_pseudometa_file_save(self.apiurl, self.project, filename, content, comment)
+
+    def pseudometa_file_ensure(self, filename, content, comment=None):
+        project_pseudometa_file_ensure(self.apiurl, self.project, filename, content, comment)
 
     def attribute_value_load(self, name):
         return attribute_value_load(self.apiurl, self.project, name)
@@ -23,6 +23,7 @@ from osclib.core import depends_on
 from osclib.core import devel_project_fallback
 from osclib.core import fileinfo_ext_all
 from osclib.core import package_binary_list
+from osclib.core import project_pseudometa_package
 from osclib.core import request_staged
 from osclib.core import target_archs
 from osclib.cycle import CycleDetector
@@ -64,9 +65,9 @@ class RepoChecker(ReviewBot.ReviewBot):
 
         build = ET.fromstringlist(show_results_meta(
             self.apiurl, project, multibuild=True, repository=['standard'])).get('state')
-        dashboard_content = api.dashboard_content_load('repo_checker')
-        if not self.force and dashboard_content:
-            build_previous = dashboard_content.splitlines()[0]
+        pseudometa_content = api.pseudometa_file_load('repo_checker')
+        if not self.force and pseudometa_content:
+            build_previous = pseudometa_content.splitlines()[0]
             if build == build_previous:
                 self.logger.info('{} build unchanged'.format(project))
                 return
@@ -91,7 +92,7 @@ class RepoChecker(ReviewBot.ReviewBot):
 
         text = '\n'.join(comment).strip()
         if not self.dryrun:
-            api.dashboard_content_ensure('repo_checker', text + '\n', 'project_only run')
+            api.pseudometa_file_ensure('repo_checker', text + '\n', 'project_only run')
         else:
             print(text)
 
@@ -349,7 +350,7 @@ class RepoChecker(ReviewBot.ReviewBot):
         binaries = set()
 
         api = self.staging_api(project)
-        content = api.dashboard_content_load('repo_checker')
+        content = api.pseudometa_file_load('repo_checker')
         if not content:
             self.logger.warn('no project_only run from which to extract existing problems')
             return binaries
@@ -414,7 +415,9 @@ class RepoChecker(ReviewBot.ReviewBot):
         if stderr:
             parts.append('<pre>\n' + stderr + '\n' + '</pre>\n')
 
-        header = '### [install check & file conflicts](/package/view_file/{}:Staging/dashboard/repo_checker)\n\n'.format(project)
+        pseudometa_project, pseudometa_package = project_pseudometa_package(self.apiurl, project)
+        path = ['package', 'view_file', pseudometa_project, pseudometa_package, 'repo_checker']
+        header = '### [install check & file conflicts](/{})\n\n'.format('/'.join(path))
         return CheckResult(False, header + ('\n' + ('-' * 80) + '\n\n').join(parts))
 
 
@@ -112,7 +112,7 @@ class StagingHelper(object):
 
     def crawl(self):
         """Main method"""
-        rebuild_data = self.api.dashboard_content_load('support_pkg_rebuild')
+        rebuild_data = self.api.pseudometa_file_load('support_pkg_rebuild')
         if rebuild_data is None:
             print "There is no support_pkg_rebuild file!"
             return
@@ -173,7 +173,7 @@ class StagingHelper(object):
         rebuild_data_updated = ET.tostring(root)
         logging.debug(rebuild_data_updated)
         if rebuild_data_updated != rebuild_data:
-            self.api.dashboard_content_save(
+            self.api.pseudometa_file_save(
                 'support_pkg_rebuild', rebuild_data_updated, 'support package rebuild')
 
 def main(args):
@@ -1,5 +1,5 @@
 [Unit]
-Description=openSUSE Release Tools: staging-bot devel list (stored in dashboard)
+Description=openSUSE Release Tools: staging-bot devel list (stored in pseudometa package)
 
 [Service]
 User=osrt-staging-bot
@@ -1,5 +1,5 @@
 [Unit]
-Description=openSUSE Release Tools: staging-bot devel list (stored in dashboard)
+Description=openSUSE Release Tools: staging-bot devel list (stored in pseudometa package)
 
 [Timer]
 OnBootSec=120
@@ -154,7 +154,7 @@ class ToTestBase(object):
         raise NotFoundException("can't find %s version" % self.project)
 
     def current_qa_version(self):
-        return self.api.dashboard_content_load('version_totest')
+        return self.api.pseudometa_file_load('version_totest')
 
     def find_openqa_results(self, snapshot):
         """Return the openqa jobs of a given snapshot and filter out the
@@ -614,7 +614,7 @@ class ToTestBase(object):
 
     def write_version_to_dashboard(self, target, version):
         if not (self.dryrun or self.norelease):
-            self.api.dashboard_content_ensure('version_%s' % target, version, comment='Update version')
+            self.api.pseudometa_file_ensure('version_%s' % target, version, comment='Update version')
 
 
 class ToTestBaseNew(ToTestBase):
@@ -896,7 +896,7 @@ class ToTest150Images(ToTestBaseNew):
         return 'openSUSE Leap 15.0 Images'
 
     def current_qa_version(self):
-        return self.api.dashboard_content_load('version_totest_images')
+        return self.api.pseudometa_file_load('version_totest_images')
 
     def write_version_to_dashboard(self, target, version):
         super(ToTest150Images, self).write_version_to_dashboard('{}_images'.format(target), version)