# Copyright (C) 2006 Novell Inc. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
import os
import re
import sys
import subprocess
import urllib2  # used below for urllib2.HTTPError / urllib2.URLError

from tempfile import NamedTemporaryFile
from shutil import rmtree

from osc.fetch import *
from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir
from osc.util import rpmquery, debquery
import osc.conf
import oscerr

try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

from conf import config, cookiejar
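
# Mapping from a 32bit build architecture to the personality wrapper command
# that is prepended to the build command when the host architecture differs
# from the build architecture (see the command construction at the end of main()).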
change_personality = {
    'i686': 'linux32',
    'i586': 'linux32',
    'i386': 'linux32',
    'ppc': 'powerpc32',
    's390': 's390',
}
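
# For each host architecture, the list of build architectures it is able to
# build for (checked against the buildinfo's arch before the build is started).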
can_also_build = {
    'armv4l': ['armv4l'],
    'armv5el': ['armv4l', 'armv5el'],
    'armv6el': ['armv4l', 'armv5el', 'armv6el'],
    'armv6l': ['armv4l', 'armv5el', 'armv6el'],
    'armv7el': ['armv4l', 'armv5el', 'armv6el', 'armv7el'],
    'armv7l': ['armv4l', 'armv5el', 'armv6el', 'armv7el'],
    'armv8el': ['armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el'],
    'armv8l': ['armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el'],
    's390x': ['s390'],
    'ppc64': ['ppc', 'ppc64'],
    'i386': ['i586', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips'],
    'i586': ['i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips'],
    'i686': ['i586', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips'],
    'x86_64': ['i686', 'i586', 'i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips'],
}

# real arch of this machine
hostarch = os.uname()[4]
if hostarch == 'i686':  # FIXME
    hostarch = 'i586'

class Buildinfo:
    """represent the contents of a buildinfo file"""

    def __init__(self, filename, apiurl, buildtype='spec', localpkgs=[]):
        try:
            tree = ET.parse(filename)
        except:
            print >>sys.stderr, 'could not parse the buildinfo:'
            print >>sys.stderr, open(filename).read()
            sys.exit(1)

        root = tree.getroot()

        self.apiurl = apiurl

        if root.find('error') != None:
            sys.stderr.write('buildinfo is broken... it says:\n')
            error = root.find('error').text
            sys.stderr.write(error + '\n')
            sys.exit(1)

        if not (apiurl.startswith('https://') or apiurl.startswith('http://')):
            raise urllib2.URLError('invalid protocol for the apiurl: \'%s\'' % apiurl)

        self.buildtype = buildtype
        self.apiurl = apiurl

        # are we building .rpm or .deb?
        # XXX: shouldn't we deliver the type via the buildinfo?
        self.pacsuffix = 'rpm'
        if self.buildtype == 'dsc':
            self.pacsuffix = 'deb'

        self.buildarch = root.find('arch').text
        if root.find('release') != None:
            self.release = root.find('release').text
        else:
            self.release = None
        self.downloadurl = root.get('downloadurl')
        self.debuginfo = 0
        if root.find('debuginfo') != None:
            try:
                self.debuginfo = int(root.find('debuginfo').text)
            except ValueError:
                pass

        self.deps = []
        self.projects = {}
        self.keys = []
        self.prjkeys = []
        for node in root.findall('bdep'):
            p = Pac(node, self.buildarch, self.pacsuffix,
                    apiurl, localpkgs)
            if p.project:
                self.projects[p.project] = 1
            self.deps.append(p)

        self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ]
        self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ]
        self.runscripts_list = [ dep.name for dep in self.deps if dep.runscripts ]

    def has_dep(self, name):
        for i in self.deps:
            if i.name == name:
                return True
        return False

    def remove_dep(self, name):
        for i in self.deps:
            if i.name == name:
                self.deps.remove(i)
                return True
        return False

class Pac:
    """represent a package to be downloaded

    We build a map that's later used to fill our URL templates
    """
    def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs=[]):

        self.mp = {}
        for i in ['name', 'package',
                  'version', 'release',
                  'project', 'repository',
                  'preinstall', 'vminstall', 'noinstall', 'runscripts',
                 ]:
            self.mp[i] = node.get(i)

        self.mp['buildarch'] = buildarch
        self.mp['pacsuffix'] = pacsuffix

        self.mp['arch'] = node.get('arch') or self.mp['buildarch']

        # this is not the ideal place to check if the package is a localdep or not
        localdep = self.mp['name'] in localpkgs
        if not localdep and not (node.get('project') and node.get('repository')):
            raise oscerr.APIError('incomplete information for package %s, may be caused by a broken project configuration.'
                                  % self.mp['name'])

        if not localdep:
            self.mp['extproject'] = node.get('project').replace(':', ':/')
            self.mp['extrepository'] = node.get('repository').replace(':', ':/')
        self.mp['repopackage'] = node.get('package') or '_repository'
        self.mp['repoarch'] = node.get('repoarch') or self.mp['buildarch']

        if pacsuffix == 'deb' and not (self.mp['name'] and self.mp['arch'] and self.mp['version']):
            raise oscerr.APIError(
                "buildinfo for package %s/%s/%s is incomplete"
                % (self.mp['name'], self.mp['arch'], self.mp['version']))

        self.mp['apiurl'] = apiurl

        if pacsuffix == 'deb':
            self.filename = debquery.DebQuery.filename(self.mp['name'], self.mp['version'], self.mp['release'], self.mp['arch'])
        else:
            self.filename = rpmquery.RpmQuery.filename(self.mp['name'], self.mp['version'], self.mp['release'], self.mp['arch'])

        self.mp['filename'] = self.filename
        if self.mp['repopackage'] == '_repository':
            self.mp['repofilename'] = self.mp['name']
        else:
            self.mp['repofilename'] = self.mp['filename']

        # make the content of the dictionary accessible as class attributes
        self.__dict__.update(self.mp)

    def makeurls(self, cachedir, urllist):

        self.urllist = []

        # build up local URL
        # by using the urlgrabber with local urls, we basically build up a cache.
        # the cache has no validation, since the package servers don't support etags,
        # or if-modified-since, so the caching is simply name-based (on the assumption
        # that the filename is suitable as identifier)
        self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.arch)
        self.fullfilename = os.path.join(self.localdir, self.filename)
        self.url_local = 'file://%s/' % self.fullfilename

        # first, add the local URL
        self.urllist.append(self.url_local)

        # remote URLs
        for url in urllist:
            self.urllist.append(url % self.mp)

    def __str__(self):
        return self.name

    def __repr__(self):
        return "%s" % self.name

def get_built_files(pacdir, pactype):
    if pactype == 'rpm':
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'RPMS'),
                                    '-name', '*.rpm'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SRPMS'),
                                    '-name', '*.rpm'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
    elif pactype == 'kiwi':
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'KIWI'),
                                    '-type', 'f'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        # kiwi builds produce no separate source packages
        s_built = ''
    else:
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DEBS'),
                                    '-name', '*.deb'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SOURCES.DEB'),
                                    '-type', 'f'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
    return s_built, b_built

def get_repo(path):
    """Walks up path looking for any repodata directories.

    @param path path to a directory
    @return str path to repository directory containing repodata directory
    """
    oldDirectory = None
    currentDirectory = os.path.abspath(path)
    repositoryDirectory = None

    # while there are still parent directories
    while currentDirectory != oldDirectory:
        children = os.listdir(currentDirectory)

        if "repodata" in children:
            repositoryDirectory = currentDirectory
            break

        # ascend
        oldDirectory = currentDirectory
        currentDirectory = os.path.abspath(os.path.join(oldDirectory,
                                                        os.pardir))

    return repositoryDirectory

def get_prefer_pkgs(dirs, wanted_arch, type):
    import glob
    from util import repodata, packagequery, cpio
    paths = []
    repositories = []

    suffix = '*.rpm'
    if type == 'dsc':
        suffix = '*.deb'

    for dir in dirs:
        # check for repodata
        repository = get_repo(dir)
        if repository is None:
            paths += glob.glob(os.path.join(os.path.abspath(dir), suffix))
        else:
            repositories.append(repository)

    packageQueries = packagequery.PackageQueries(wanted_arch)

    for repository in repositories:
        repodataPackageQueries = repodata.queries(repository)

        for packageQuery in repodataPackageQueries:
            packageQueries.add(packageQuery)

    for path in paths:
        if path.endswith('src.rpm'):
            continue
        if path.find('-debuginfo-') > 0:
            continue
        packageQuery = packagequery.PackageQuery.query(path)
        packageQueries.add(packageQuery)

    prefer_pkgs = dict((name, packageQuery.path())
                       for name, packageQuery in packageQueries.iteritems())

    depfile = create_deps(packageQueries.values())
    cpio = cpio.CpioWrite()
    cpio.add('deps', '\n'.join(depfile))
    return prefer_pkgs, cpio


def create_deps(pkgqs):
    """
    creates a list of requires/provides which corresponds to build's internal
    dependency file format
    """
    depfile = []
    for p in pkgqs:
        id = '%s.%s-0/0/0: ' % (p.name(), p.arch())
        depfile.append('R:%s%s' % (id, ' '.join(p.requires())))
        depfile.append('P:%s%s' % (id, ' '.join(p.provides())))
    return depfile
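
# main() drives a local build:
#  - read the build description and fetch the matching buildinfo and
#    buildconfig from the API server,
#  - download and cache all required packages (optionally verifying them),
#  - write the rpmlist consumed by the build script,
#  - finally run the build script, optionally inside a XEN/KVM VM.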
def main(opts, argv):

    repo = argv[0]
    arch = argv[1]
    build_descr = argv[2]
    xp = []
    build_root = None

    build_descr = os.path.abspath(build_descr)
    build_type = os.path.splitext(build_descr)[1][1:]
    if build_type not in ['spec', 'dsc', 'kiwi']:
        raise oscerr.WrongArgs(
            'Unknown build type: \'%s\'. Build description should end in .spec, .dsc or .kiwi.' \
            % build_type)
    if not os.path.isfile(build_descr):
        raise oscerr.WrongArgs('Error: build description file named \'%s\' does not exist.' % build_descr)

    buildargs = []
    if not opts.userootforbuild:
        buildargs.append('--norootforbuild')
    if opts.clean:
        buildargs.append('--clean')
    if opts.noinit:
        buildargs.append('--noinit')
    if opts.nochecks:
        buildargs.append('--no-checks')
    if not opts.no_changelog:
        buildargs.append('--changelog')
    if opts.root:
        build_root = opts.root
    if opts.jobs:
        buildargs.append('--jobs %s' % opts.jobs)
    elif config['build-jobs'] > 1:
        buildargs.append('--jobs %s' % config['build-jobs'])
    if opts.icecream or config['icecream'] != '0':
        if opts.icecream:
            num = opts.icecream
        else:
            num = config['icecream']

        if int(num) > 0:
            buildargs.append('--icecream %s' % num)
            xp.append('icecream')
            xp.append('gcc-c++')
    if opts.ccache:
        buildargs.append('--ccache')
        xp.append('ccache')
    if opts.linksources:
        buildargs.append('--linksources')
    if opts.baselibs:
        buildargs.append('--baselibs')
    if opts.debuginfo:
        buildargs.append('--debug')
    if opts._with:
        for o in opts._with:
            buildargs.append('--with %s' % o)
    if opts.without:
        for o in opts.without:
            buildargs.append('--without %s' % o)

    build_uid = ''
    if config['build-uid']:
        build_uid = config['build-uid']
    if opts.build_uid:
        build_uid = opts.build_uid
    if build_uid:
        buildidre = re.compile('^[0-9]{1,5}:[0-9]{1,5}$')
        if build_uid == 'caller':
            buildargs.append('--uid %s:%s' % (os.getuid(), os.getgid()))
        elif buildidre.match(build_uid):
            buildargs.append('--uid %s' % build_uid)
        else:
            print >>sys.stderr, 'Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"'
            return 1
    # FIXME: quoting
    # if opts.define:
    #     buildargs.append('--define "%s"' % opts.define)

    if opts.alternative_project:
        prj = opts.alternative_project
        pac = '_repository'
    else:
        prj = store_read_project(os.curdir)
        if opts.local_package:
            pac = '_repository'
        else:
            pac = store_read_package(os.curdir)
    apiurl = store_read_apiurl(os.curdir)

    # make it possible to override configuration of the rc file
    for var in ['OSC_PACKAGECACHEDIR', 'OSC_SU_WRAPPER', 'OSC_BUILD_ROOT']:
        val = os.getenv(var)
        if val:
            if var.startswith('OSC_'): var = var[4:]
            var = var.lower().replace('_', '-')
            if config.has_key(var):
                print 'Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val)
            config[var] = val

    pacname = pac
    if pacname == '_repository':
        if opts.local_package:
            pacname = os.path.splitext(build_descr)[0]
        else:
            pacname = store_read_package(os.curdir)
    if not build_root:
        build_root = config['build-root'] % { 'repo': repo, 'arch': arch,
                                              'project': prj, 'package': pacname }

    extra_pkgs = []
    if not opts.extra_pkgs:
        extra_pkgs = config['extra-pkgs']
    elif opts.extra_pkgs != ['']:
        extra_pkgs = opts.extra_pkgs

    if xp:
        extra_pkgs += xp

    prefer_pkgs = {}
    build_descr_data = open(build_descr).read()

    # XXX: dirty hack but there's no api to provide custom defines
    if opts.without:
        s = ''
        for i in opts.without:
            s += "%%define _without_%s 1\n" % i
            s += "%%define _with_%s 0\n" % i
        build_descr_data = s + build_descr_data
    if opts._with:
        s = ''
        for i in opts._with:
            s += "%%define _without_%s 0\n" % i
            s += "%%define _with_%s 1\n" % i
        build_descr_data = s + build_descr_data

    if opts.prefer_pkgs:
        print 'Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs)
        prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type)
        cpio.add(os.path.basename(build_descr), build_descr_data)
        build_descr_data = cpio.get()

    # special handling for overlay and rsync-src/dest
    specialcmdopts = ''
    if opts.rsyncsrc or opts.rsyncdest:
        if not opts.rsyncsrc or not opts.rsyncdest:
            raise oscerr.WrongOptions('When using --rsync-{src,dest} both parameters have to be specified.')
        myrsyncsrc = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.rsyncsrc)))
        if not os.path.isdir(myrsyncsrc):
            raise oscerr.WrongOptions('--rsync-src %s is not a valid directory!' % opts.rsyncsrc)
        # can't check the destination - it's inside the target chroot ;) - but we can check for sanity
        myrsyncdest = os.path.expandvars(opts.rsyncdest)
        if not os.path.isabs(myrsyncdest):
            raise oscerr.WrongOptions('--rsync-dest %s is not an absolute path (starting with \'/\')!' % opts.rsyncdest)
        specialcmdopts = '--rsync-src="%s" --rsync-dest="%s"' % (myrsyncsrc, myrsyncdest)
    if opts.overlay:
        myoverlay = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.overlay)))
        if not os.path.isdir(myoverlay):
            raise oscerr.WrongOptions('--overlay %s is not a valid directory!' % opts.overlay)
        # leading space so the option is not glued to the rsync options above
        specialcmdopts += ' --overlay="%s"' % myoverlay

    bi_file = None
    bc_file = None
    bi_filename = '_buildinfo-%s-%s.xml' % (repo, arch)
    bc_filename = '_buildconfig-%s-%s' % (repo, arch)
    if is_package_dir('.') and os.access(osc.core.store, os.W_OK):
        bi_filename = os.path.join(os.getcwd(), osc.core.store, bi_filename)
        bc_filename = os.path.join(os.getcwd(), osc.core.store, bc_filename)
    elif not os.access('.', os.W_OK):
        bi_file = NamedTemporaryFile(prefix=bi_filename)
        bi_filename = bi_file.name
        bc_file = NamedTemporaryFile(prefix=bc_filename)
        bc_filename = bc_file.name
    else:
        bi_filename = os.path.abspath(bi_filename)
        bc_filename = os.path.abspath(bc_filename)

    try:
        if opts.noinit:
            if not os.path.isfile(bi_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildinfo file')
            print 'Using local file \'%s\' as buildinfo' % bi_filename
            if not os.path.isfile(bc_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildconfig file')
            print 'Using local file \'%s\' as buildconfig' % bc_filename
        else:
            print 'Getting buildinfo from server and storing it in %s' % bi_filename
            if not bi_file:
                bi_file = open(bi_filename, 'w')
            bi_text = ''.join(get_buildinfo(apiurl,
                                            prj,
                                            pac,
                                            repo,
                                            arch,
                                            specfile=build_descr_data,
                                            addlist=extra_pkgs))
            bi_file.write(bi_text)
            bi_file.flush()

            print 'Getting buildconfig from server and storing it in %s' % bc_filename
            if not bc_file:
                bc_file = open(bc_filename, 'w')
            bc_file.write(get_buildconfig(apiurl, prj, pac, repo, arch))
            bc_file.flush()
    except urllib2.HTTPError, e:
        if e.code == 404:
            # check what caused the 404
            if meta_exists(metatype='prj', path_args=(quote_plus(prj), ),
                           template_args=None, create_new=False, apiurl=apiurl):
                pkg_meta_e = None
                try:
                    # take care not to run into double trouble.
                    pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj),
                                             quote_plus(pac)), template_args=None, create_new=False,
                                             apiurl=apiurl)
                except:
                    pass

                if pac == '_repository' or pkg_meta_e:
                    print >>sys.stderr, 'ERROR: Either the repo/arch parameters are wrong or the .spec/.dsc/.kiwi file failed to parse due to a syntax error'
                else:
                    print >>sys.stderr, 'The package \'%s\' does not exist - please ' \
                                        'rerun with \'--local-package\'' % pac
            else:
                print >>sys.stderr, 'The project \'%s\' does not exist - please ' \
                                    'rerun with \'--alternative-project <alternative_project>\'' % prj
            sys.exit(1)
        else:
            raise

    bi = Buildinfo(bi_filename, apiurl, build_type, prefer_pkgs.keys())

    if bi.debuginfo and not (opts.disable_debuginfo or '--debug' in buildargs):
        buildargs.append('--debug')

    if opts.release:
        bi.release = opts.release

    if bi.release:
        buildargs.append('--release %s' % bi.release)

    buildargs = ' '.join(buildargs)

    # real arch of this machine
    # vs.
    # arch we are supposed to build for
    if hostarch != bi.buildarch:
        if bi.buildarch not in can_also_build.get(hostarch, []):
            print >>sys.stderr, 'Error: hostarch \'%s\' cannot build \'%s\'.' % (hostarch, bi.buildarch)
            return 1

    rpmlist_prefers = []
    if prefer_pkgs:
        print 'Evaluating preferred packages'
        for name, path in prefer_pkgs.iteritems():
            if bi.has_dep(name):
                # We remove a preferred package from the buildinfo, so that the
                # fetcher does not try to download it.
                # Instead, we put it in a list which is appended to the rpmlist later.
                # At the same time, this will make sure that these packages are
                # not verified.
                bi.remove_dep(name)
                rpmlist_prefers.append((name, path))
                print ' - %s (%s)' % (name, path)
                continue

    print 'Updating cache of required packages'

    urllist = []
    if not opts.download_api_only:
        # transform 'url1, url2, url3' form into a list
        if 'urllist' in config:
            if type(config['urllist']) == str:
                re_clist = re.compile('[, ]+')
                urllist = [ i.strip() for i in re_clist.split(config['urllist'].strip()) ]
            else:
                urllist = config['urllist']

        # OBS 1.5 and before has no downloadurl defined in buildinfo
        if bi.downloadurl:
            urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
    if not opts.cpio_bulk_download:
        urllist.append('%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s')

    fetcher = Fetcher(cachedir=config['packagecachedir'],
                      urllist=urllist,
                      api_host_options=config['api_host_options'],
                      offline=opts.noinit,
                      http_debug=config['http_debug'],
                      enable_cpio=opts.cpio_bulk_download,
                      cookiejar=cookiejar)

    # now update the package cache
    fetcher.run(bi)

    # Make packages from buildinfo available as repos for kiwi
    if build_type == 'kiwi':
        if not os.path.exists('repos'):
            os.mkdir('repos')
        else:
            rmtree('repos')
            os.mkdir('repos')
        for i in bi.deps:
            # project
            pdir = str(i.extproject).replace(':/', ':')
            # repo
            rdir = str(i.extrepository).replace(':/', ':')
            # arch
            adir = i.repoarch
            # project/repo
            prdir = "repos/" + pdir + "/" + rdir
            # project/repo/arch
            pradir = prdir + "/" + adir
            # source fullfilename
            sffn = i.fullfilename
            print "Using package: " + sffn
            # target fullfilename
            tffn = pradir + "/" + sffn.split("/")[-1]
            if not os.path.exists(os.path.join(pradir)):
                os.makedirs(os.path.join(pradir))
            if not os.path.exists(tffn):
                if opts.linksources:
                    os.link(sffn, tffn)
                else:
                    os.symlink(sffn, tffn)

    if bi.pacsuffix == 'rpm':
        if opts.no_verify or opts.noinit:
            print 'Skipping verification of package signatures'
        else:
            print 'Verifying integrity of cached packages'
            t = config['api_host_options'][apiurl]['trusted_prj']
            for prj in bi.prjkeys:
                if not prj in t:
                    print "\nYou are trying to use packages from project '%s'." % prj
                    print "Note that malicious packages can compromise your system."
                    # saving back to config file is complicated
                    # r = raw_input("Would you like to trust '%s' (a)lways, (t)emporarily or (N)ever? " % prj)
                    # if r == 'a':
                    #     config['api_host_options'][apiurl]['trusted_prj'] += prj
                    # elif r != 't':
                    #     print "Well, good bye then :-)"
                    #     sys.exit(1)
            verify_pacs([ i.fullfilename for i in bi.deps ], bi.keys)
    elif bi.pacsuffix == 'deb':
        if config['build-type'] == "xen" or config['build-type'] == "kvm":
            print 'Skipping verification of package signatures due to secure VM build'
        elif opts.no_verify or opts.noinit:
            print 'Skipping verification of package signatures'
        else:
            print 'WARNING: deb packages are not verified, they can compromise your system!'
    else:
        print 'WARNING: unknown packages are not verified, they can compromise your system!'

    print 'Writing build configuration'

    rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps if not i.noinstall ]
    rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers ]

    rpmlist.append('preinstall: ' + ' '.join(bi.preinstall_list) + '\n')
    rpmlist.append('vminstall: ' + ' '.join(bi.vminstall_list) + '\n')
    rpmlist.append('runscripts: ' + ' '.join(bi.runscripts_list) + '\n')

    rpmlist_file = NamedTemporaryFile(prefix='rpmlist.')
    rpmlist_filename = rpmlist_file.name
    rpmlist_file.writelines(rpmlist)
    rpmlist_file.flush()

    vm_options = ''
    if config['build-device'] and config['build-memory'] and config['build-type']:
        my_build_device = config['build-device'] % { 'repo': repo, 'arch': arch,
                                                     'project': prj, 'package': pacname }
        if config['build-type'] == 'kvm':
            vm_options = '--kvm ' + my_build_device
        elif config['build-type'] == 'xen':
            vm_options = '--xen ' + my_build_device
        else:
            raise oscerr.WrongArgs('ERROR: unknown VM type is set! ("%s")' % config['build-type'])

        if config['build-swap']:
            my_build_swap = config['build-swap'] % { 'repo': repo, 'arch': arch,
                                                     'project': prj, 'package': pacname }
            vm_options += ' --swap ' + my_build_swap

        if config['build-memory']:
            vm_options += ' --memory ' + config['build-memory']

        if config['build-vmdisk-autosetup']:
            if config['build-vmdisk-rootsize'] and config['build-vmdisk-swapsize']:
                vm_options += ' --vmdisk-autosetup '
                vm_options += ' --vmdisk-rootsize ' + config['build-vmdisk-rootsize']
                vm_options += ' --vmdisk-swapsize ' + config['build-vmdisk-swapsize']
            if config['build-vmdisk-force']:
                vm_options += ' --vmdisk-force '

    print 'Running build'
    cmd = '"%s" --root="%s" --rpmlist="%s" --dist="%s" %s --arch=%s %s "%s" %s' \
          % (config['build-cmd'],
             build_root,
             rpmlist_filename,
             bc_filename,
             specialcmdopts,
             bi.buildarch,
             vm_options,
             build_descr,
             buildargs)

    if config['su-wrapper'].startswith('su '):
        tmpl = '%s \'%s\''
    else:
        tmpl = '%s %s'
    cmd = tmpl % (config['su-wrapper'], cmd)

    # change personality, if needed
    if hostarch != bi.buildarch:
        cmd = (change_personality.get(bi.buildarch, '') + ' ' + cmd).strip()

    rc = subprocess.call(cmd, shell=True)
    if rc:
        print
        print 'The buildroot was:', build_root
        sys.exit(rc)

    pacdir = os.path.join(build_root, '.build.packages')
    if os.path.islink(pacdir):
        pacdir = os.readlink(pacdir)
        pacdir = os.path.join(build_root, pacdir)

    if os.path.exists(pacdir):
        (s_built, b_built) = get_built_files(pacdir, bi.pacsuffix)

        print
        if s_built: print s_built
        print
        print b_built

        if opts.keep_pkgs:
            import shutil
            for i in b_built.splitlines() + s_built.splitlines():
                shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i)))

    if bi_file:
        bi_file.close()
    if bc_file:
        bc_file.close()
    rpmlist_file.close()

# vim: sw=4 et