#!/usr/bin/python

# Copyright (C) 2006 Peter Poeml / Novell Inc. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.

__version__ = '0.99'

import os
import sys
import urllib2
from urllib import pathname2url, quote_plus, urlencode
from urlparse import urlsplit, urlunsplit
from cStringIO import StringIO
import shutil
import conf

try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

BUFSIZE = 1024*1024
store = '.osc'
exclude_stuff = [store, 'CVS', '*~', '.*']

new_project_templ = """\
<project name="%(name)s">

  <title>Short title of NewProject</title>

  <description>This project aims at providing some foo and bar.

It also does some weird stuff.
</description>

  <person role="maintainer" userid="%(user)s" />
  <person role="bugowner" userid="%(user)s" />

  <!-- remove this block to publish your packages on the mirrors -->
  <publish>
    <disable />
  </publish>
  <build>
    <enable />
  </build>
  <debuginfo>
    <disable />
  </debuginfo>

<!-- remove this comment to enable one or more build targets

  <repository name="openSUSE_Factory">
    <path project="openSUSE:Factory" repository="standard" />
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
  <repository name="openSUSE_10.2">
    <path project="openSUSE:10.2" repository="standard"/>
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
  <repository name="SUSE_Linux_10.1">
    <path project="SUSE:SL-10.1" repository="standard" />
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
  <repository name="SUSE_Linux_10.0">
    <path project="SUSE:SL-10.0" repository="standard" />
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
  <repository name="Fedora_7">
    <path project="Fedora:7" repository="standard" />
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
  <repository name="SLE_10">
    <path project="SUSE:SLE-10:SDK" repository="standard" />
    <arch>x86_64</arch>
    <arch>i586</arch>
  </repository>
-->

</project>
"""

new_package_templ = """\
<package name="%(name)s">

  <title>Title of New Package</title>

  <description>LONG DESCRIPTION
GOES
HERE
</description>

  <person role="maintainer" userid="%(user)s"/>
  <person role="bugowner" userid="%(user)s"/>

<!--
  use one of the examples below to disable building of this package
  on a certain architecture, in a certain repository,
  or a combination thereof:

  <disable arch="x86_64"/>
  <disable repository="SUSE_SLE-10"/>
  <disable repository="SUSE_SLE-10" arch="x86_64"/>
-->

</package>
"""

new_user_template = """\
<person>
  <login>%(user)s</login>
  <email>PUT_EMAIL_ADDRESS_HERE</email>
  <realname>PUT_REAL_NAME_HERE</realname>
  <watchlist>
    <project name="home:%(user)s"/>
  </watchlist>
</person>
"""

info_templ = """\
Path: %s
API URL: %s
srcmd5: %s
Revision: %s
Link info: %s
"""

new_pattern_template = """\
<!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->

<pattern>
</pattern>
"""

buildstatus_symbols = {'succeeded':       '.',
                       'disabled':        ' ',
                       'expansion error': 'E',
                       'failed':          'F',
                       'broken':          'B',
                       'blocked':         'b',
                       'building':        '%',
                       'scheduled':       's',
                      }

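# Illustrative sketch (not part of the original module): one way the
# buildstatus_symbols mapping could be used to condense build results into a
# one-character-per-target status table. The 'results' argument is a
# hypothetical list of (repository, arch, status) tuples.
def _example_render_buildstatus(results):
    lines = []
    for repo, arch, status in results:
        # fall back to '?' for any status code not covered by the mapping
        lines.append('%-25s %-10s %s' % (repo, arch, buildstatus_symbols.get(status, '?')))
    return '\n'.join(lines)
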
class File:
    """represent a file, including its metadata"""
    def __init__(self, name, md5, size, mtime):
        self.name = name
        self.md5 = md5
        self.size = size
        self.mtime = mtime

    def __str__(self):
        return self.name

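# Illustrative sketch (not part of the original module): File objects are
# typically built from the <entry> elements of a '_files' directory listing,
# as update_datastructs() further below does. The XML snippet in the usage
# comment is a made-up example.
def _example_files_from_listing(xml_text):
    root = ET.fromstring(xml_text)
    return [ File(node.get('name'),
                  node.get('md5'),
                  int(node.get('size')),
                  int(node.get('mtime')))
             for node in root.findall('entry') ]

# _example_files_from_listing('<directory rev="1"><entry name="foo.spec"'
#     ' md5="d41d8cd98f00b204e9800998ecf8427e" size="0" mtime="0"/></directory>')
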
class Linkinfo:
    """linkinfo metadata (which is part of the xml representing a directory)
    """
    def __init__(self):
        """creates an empty linkinfo instance"""
        self.project = None
        self.package = None
        self.xsrcmd5 = None
        self.lsrcmd5 = None
        self.srcmd5 = None

    def read(self, linkinfo_node):
        """read in the linkinfo metadata from the <linkinfo> element passed as
        elementtree node.
        If the passed element is None, the method does nothing.
        """
        if linkinfo_node == None:
            return
        self.project = linkinfo_node.get('project')
        self.package = linkinfo_node.get('package')
        self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
        self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
        self.srcmd5 = linkinfo_node.get('srcmd5')

    def islink(self):
        """returns True if the linkinfo is not empty, otherwise False"""
        if self.xsrcmd5 or self.lsrcmd5:
            return True
        return False

    def isexpanded(self):
        """returns True if the package is an expanded link"""
        if self.lsrcmd5 and not self.xsrcmd5:
            return True
        return False

    def __str__(self):
        """return an informative string representation"""
        if self.islink() and not self.isexpanded():
            return 'project %s, package %s, xsrcmd5 %s' \
                    % (self.project, self.package, self.xsrcmd5)
        elif self.islink() and self.isexpanded():
            return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
                    % (self.project, self.package, self.srcmd5, self.lsrcmd5)
        else:
            return 'None'

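# Illustrative sketch (not part of the original module): reading linkinfo
# from a directory listing. The XML in the usage comment is a made-up example
# of what the server returns for a linked package.
def _example_read_linkinfo(xml_text):
    root = ET.fromstring(xml_text)
    li = Linkinfo()
    # find() returns None when there is no <linkinfo> element;
    # Linkinfo.read() treats that as "not a link"
    li.read(root.find('linkinfo'))
    return li

# li = _example_read_linkinfo('<directory rev="1">'
#     '<linkinfo project="openSUSE:Factory" package="foo"'
#     ' xsrcmd5="d41d8cd98f00b204e9800998ecf8427e"/></directory>')
# li.islink()  ->  True
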
class Project:
    """represent a project directory, holding packages"""
    def __init__(self, dir, getPackageList=True):
        import fnmatch
        self.dir = dir
        self.absdir = os.path.abspath(dir)

        self.name = store_read_project(self.dir)
        self.apiurl = store_read_apiurl(self.dir)

        if getPackageList:
            self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
        else:
            self.pacs_available = []

        if conf.config['do_package_tracking']:
            self.pac_root = self.read_packages().getroot()
            self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
            self.pacs_excluded = [ i for i in os.listdir(self.dir)
                                   for j in exclude_stuff
                                   if fnmatch.fnmatch(i, j) ]
            self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
            # store all broken packages (e.g. packages which were removed by a non-osc command)
            # in the self.pacs_broken list
            self.pacs_broken = []
            for p in self.pacs_have:
                if not os.path.isdir(os.path.join(self.absdir, p)):
                    # all states will be replaced with the '!'-state
                    # (except if the package is already marked as deleted ('D'-state))
                    self.pacs_broken.append(p)
        else:
            self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]

        self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]

    def checkout_missing_pacs(self):
        for pac in self.pacs_missing:
            if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
                # pac is not under version control but a local file/dir exists
                print 'can\'t add package \'%s\': Object already exists' % pac
                sys.exit(1)
            else:
                print 'checking out new package %s' % pac
                olddir = os.getcwd()
                os.chdir(os.path.join(self.absdir, os.pardir))
                checkout_package(self.apiurl, self.name, pac, pathname = getTransActPath(os.path.join(self.dir, pac)), prj_obj=self)
                os.chdir(olddir)

    def set_state(self, pac, state):
        node = self.get_package_node(pac)
        if node == None:
            self.new_package_entry(pac, state)
        else:
            node.attrib['state'] = state

    def get_package_node(self, pac):
        for node in self.pac_root.findall('package'):
            if pac == node.get('name'):
                return node
        return None

    def del_package_node(self, pac):
        for node in self.pac_root.findall('package'):
            if pac == node.get('name'):
                self.pac_root.remove(node)

    def get_state(self, pac):
        node = self.get_package_node(pac)
        if node != None:
            return node.get('state')
        else:
            return None

    def new_package_entry(self, name, state):
        ET.SubElement(self.pac_root, 'package', name=name, state=state)

    def read_packages(self):
        if os.path.isfile(os.path.join(self.absdir, store, '_packages')):
            return ET.parse(os.path.join(self.absdir, store, '_packages'))
        else:
            # scan the project for existing packages and migrate them
            cur_pacs = []
            for data in os.listdir(self.dir):
                pac_dir = os.path.join(self.absdir, data)
                # we cannot use self.pacs_available because we cannot guarantee that the package list
                # was fetched from the server
                if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
                   and Package(pac_dir).name == data:
                    cur_pacs.append(ET.Element('package', name=data, state=' '))
            store_write_initial_packages(self.absdir, self.name, cur_pacs)
            return ET.parse(os.path.join(self.absdir, store, '_packages'))

    def write_packages(self):
        # TODO: should we only modify the existing file instead of overwriting it?
        ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))

    def addPackage(self, pac):
        state = self.get_state(pac)
        if state == None or state == 'D':
            self.new_package_entry(pac, 'A')
            self.write_packages()
            # sometimes the new pac doesn't exist in the list because
            # it would take too much time to update all data structures regularly
            if pac in self.pacs_unvers:
                self.pacs_unvers.remove(pac)
            return True
        else:
            print 'package \'%s\' is already under version control' % pac
            return False

    def delPackage(self, pac, force = False):
        state = self.get_state(pac.name)
        can_delete = True
        if state == ' ' or state == 'D':
            del_files = []
            for file in pac.filenamelist + pac.filenamelist_unvers:
                filestate = pac.status(file)
                if filestate == 'M' or filestate == 'C' or \
                   filestate == 'A' or filestate == '?':
                    can_delete = False
                else:
                    del_files.append(file)
            if can_delete or force:
                for file in del_files:
                    pac.delete_localfile(file)
                    if pac.status(file) != '?':
                        pac.delete_storefile(file)
                        # this is not really necessary
                        pac.put_on_deletelist(file)
                        print statfrmt('D', os.path.join(pac.dir, file))
                # some black path voodoo
                print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
                pac.write_deletelist()
                self.set_state(pac.name, 'D')
                self.write_packages()
            else:
                print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
        elif state == 'A':
            if force:
                delete_dir(pac.absdir)
                self.del_package_node(pac.name)
                self.write_packages()
                print statfrmt('D', pac.name)
            else:
                print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
        elif state == None:
            print 'package is not under version control'
        else:
            print 'unsupported state'

    def update(self, pacs = (), expand_link=False, unexpand_link=False):
        if len(pacs):
            for pac in pacs:
                Package(os.path.join(self.dir, pac)).update()
        else:
            # update the complete project

            # packages which no longer exist upstream
            upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']

            for pac in upstream_del:
                p = Package(os.path.join(self.dir, pac))
                self.delPackage(p, force = True)
                delete_storedir(p.storedir)
                try:
                    os.rmdir(pac)
                except:
                    pass
                self.pac_root.remove(self.get_package_node(p.name))
                self.pacs_have.remove(pac)

            for pac in self.pacs_have:
                state = self.get_state(pac)
                if pac in self.pacs_broken:
                    if self.get_state(pac) != 'A':
                        olddir = self.absdir
                        os.chdir(os.path.join(self.absdir, os.pardir))
                        checkout_package(self.apiurl, self.name, pac,
                                         pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self)
                        os.chdir(olddir)
                elif state == ' ':
                    # do a simple update
                    p = Package(os.path.join(self.dir, pac))
                    rev = None
                    if expand_link and p.islink() and not p.isexpanded():
                        print 'Expanding to rev', p.linkinfo.xsrcmd5
                        rev = p.linkinfo.xsrcmd5
                    elif unexpand_link and p.islink() and p.isexpanded():
                        print 'Unexpanding to rev', p.linkinfo.lsrcmd5
                        rev = p.linkinfo.lsrcmd5
                    elif p.islink() and p.isexpanded():
                        rev = show_upstream_xsrcmd5(p.apiurl,
                                                    p.prjname, p.name)
                    p.update(rev)
                elif state == 'D':
                    # TODO: Package.update has to be fixed to behave like svn does
                    if pac in self.pacs_broken:
                        olddir = self.absdir
                        os.chdir(os.path.join(self.absdir, os.pardir))
                        checkout_package(self.apiurl, self.name, pac,
                                         pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self)
                        os.chdir(olddir)
                    else:
                        Package(os.path.join(self.dir, pac)).update()
                elif state == 'A' and pac in self.pacs_available:
                    # file/dir called pac already exists and is under version control
                    print 'can\'t add package \'%s\': Object already exists' % pac
                    sys.exit(1)
                elif state == 'A':
                    # do nothing
                    pass
                else:
                    print 'unexpected state... package \'%s\'' % pac

            self.checkout_missing_pacs()
        self.write_packages()

    def commit(self, pacs = (), msg = '', files = {}):
        if len(pacs):
            for pac in pacs:
                todo = []
                if files.has_key(pac):
                    todo = files[pac]
                state = self.get_state(pac)
                if state == 'A':
                    self.commitNewPackage(pac, msg, todo)
                elif state == 'D':
                    self.commitDelPackage(pac)
                elif state == ' ':
                    # display the correct dir when sending the changes
                    if os.path.samefile(os.path.join(self.dir, pac), os.getcwd()):
                        p = Package('.')
                    else:
                        p = Package(os.path.join(self.dir, pac))
                    p.todo = todo
                    p.commit(msg)
                elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
                    print 'osc: \'%s\' is not under version control' % pac
                elif pac in self.pacs_broken:
                    print 'osc: \'%s\' package not found' % pac
                elif state == None:
                    self.commitExtPackage(pac, msg, todo)
        else:
            # if we have packages marked as '!' we cannot commit
            for pac in self.pacs_broken:
                if self.get_state(pac) != 'D':
                    print 'commit failed: package \'%s\' is missing' % pac
                    sys.exit(1)
            for pac in self.pacs_have:
                state = self.get_state(pac)
                if state == ' ':
                    # do a simple commit
                    try:
                        Package(os.path.join(self.dir, pac)).commit(msg)
                    except SystemExit:
                        pass
                elif state == 'D':
                    self.commitDelPackage(pac)
                elif state == 'A':
                    self.commitNewPackage(pac, msg)
        self.write_packages()

    def commitNewPackage(self, pac, msg = '', files = []):
        """creates and commits a new package if it does not exist on the server"""
        if pac in self.pacs_available:
            print 'package \'%s\' already exists' % pac
        else:
            user = conf.get_apiurl_usr(self.apiurl)
            edit_meta(metatype='pkg',
                      path_args=(quote_plus(self.name), quote_plus(pac)),
                      template_args=({
                              'name': pac,
                              'user': user}),
                      apiurl=self.apiurl)
            # display the correct dir when sending the changes
            olddir = os.getcwd()
            if os.path.samefile(os.path.join(self.dir, pac), os.curdir):
                os.chdir(os.pardir)
                p = Package(pac)
            else:
                p = Package(os.path.join(self.dir, pac))
            p.todo = files
            print statfrmt('Sending', os.path.normpath(p.dir))
            try:
                p.commit(msg)
            except SystemExit:
                pass
            self.set_state(pac, ' ')
            os.chdir(olddir)

    def commitDelPackage(self, pac):
        """deletes a package on the server and in the working copy"""
        try:
            # display the correct dir when sending the changes
            if os.path.samefile(os.path.join(self.dir, pac), os.curdir):
                pac_dir = pac
            else:
                pac_dir = os.path.join(self.dir, pac)
            p = Package(os.path.join(self.dir, pac))
            delete_storedir(p.storedir)
            try:
                os.rmdir(p.dir)
            except:
                pass
        except SystemExit:
            pass
        except OSError:
            pac_dir = os.path.join(self.dir, pac)
        print statfrmt('Deleting', getTransActPath(pac_dir))
        delete_package(self.apiurl, self.name, pac)
        self.del_package_node(pac)

    def commitExtPackage(self, pac, msg, files = []):
        """commits a package from an external project"""
        if os.path.samefile(os.path.join(self.dir, pac), os.getcwd()):
            pac_path = '.'
        else:
            pac_path = os.path.join(self.dir, pac)

        project = store_read_project(pac_path)
        package = store_read_package(pac_path)
        apiurl = store_read_apiurl(pac_path)
        if meta_exists(metatype='pkg',
                       path_args=(quote_plus(project), quote_plus(package)),
                       template_args=None,
                       create_new=False, apiurl=apiurl):
            p = Package(pac_path)
            p.todo = files
            p.commit(msg)
        else:
            user = conf.get_apiurl_usr(self.apiurl)
            edit_meta(metatype='pkg',
                      path_args=(quote_plus(project), quote_plus(package)),
                      template_args=({
                              'name': pac,
                              'user': user}),
                      apiurl=apiurl)
            try:
                p = Package(pac_path)
                p.todo = files
                p.commit(msg)
            except SystemExit:
                pass

    def __str__(self):
        r = []
        r.append('*****************************************************')
        r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
        r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
        r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
        r.append('*****************************************************')
        return '\n'.join(r)

class Package:
    """represent a package (its directory) and read/keep/write its metadata"""
    def __init__(self, workingdir):
        self.dir = workingdir
        self.absdir = os.path.abspath(self.dir)
        self.storedir = os.path.join(self.absdir, store)

        check_store_version(self.dir)

        self.prjname = store_read_project(self.dir)
        self.name = store_read_package(self.dir)
        self.apiurl = store_read_apiurl(self.dir)

        self.update_datastructs()

        self.todo = []
        self.todo_send = []
        self.todo_delete = []

    def info(self):
        r = info_templ % (self.dir, self.apiurl, self.srcmd5, self.rev, self.linkinfo)
        return r

    def addfile(self, n):
        st = os.stat(os.path.join(self.dir, n))
        f = File(n, None, st.st_size, st.st_mtime)
        self.filelist.append(f)
        self.filenamelist.append(n)
        self.filenamelist_unvers.remove(n)
        shutil.copy2(os.path.join(self.dir, n), os.path.join(self.storedir, n))

    def delete_storefile(self, n):
        try: os.unlink(os.path.join(self.storedir, n))
        except: pass

    def delete_localfile(self, n):
        try: os.unlink(os.path.join(self.dir, n))
        except: pass

    def put_on_deletelist(self, n):
        if n not in self.to_be_deleted:
            self.to_be_deleted.append(n)

    def put_on_conflictlist(self, n):
        if n not in self.in_conflict:
            self.in_conflict.append(n)

    def clear_from_conflictlist(self, n):
        """delete an entry from the file, and remove the file if it would be empty"""
        if n in self.in_conflict:

            filename = os.path.join(self.dir, n)
            storefilename = os.path.join(self.storedir, n)
            myfilename = os.path.join(self.dir, n + '.mine')
            upfilename = os.path.join(self.dir, n + '.r' + self.rev)

            try:
                os.unlink(myfilename)
                # the working copy may be updated, so the .r* ending may be obsolete...
                # then we don't care
                os.unlink(upfilename)
            except:
                pass

            self.in_conflict.remove(n)

            self.write_conflictlist()

    def write_deletelist(self):
        if len(self.to_be_deleted) == 0:
            try:
                os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
            except:
                pass
        else:
            fname = os.path.join(self.storedir, '_to_be_deleted')
            f = open(fname, 'w')
            f.write('\n'.join(self.to_be_deleted))
            f.write('\n')
            f.close()

    def delete_source_file(self, n):
        """delete a local source file"""
        self.delete_localfile(n)
        self.delete_storefile(n)

    def delete_remote_source_file(self, n):
        """delete a remote source file (e.g. from the server)"""
        u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)])
        http_DELETE(u)

    def put_source_file(self, n):

        # escaping '+' in the URL path (note: not in the URL query string) is
        # only a workaround for ruby on rails, which swallows it otherwise
        query = ['rev=upload']
        u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
        http_PUT(u, file = os.path.join(self.dir, n))

        shutil.copy2(os.path.join(self.dir, n), os.path.join(self.storedir, n))

    def commit(self, msg=''):
        # commit only if the upstream revision is the same as the working copy's
        upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
        if self.rev != upstream_rev:
            print >>sys.stderr, 'Working copy \'%s\' is out of date (rev %s vs rev %s).' \
                    % (self.absdir, self.rev, upstream_rev)
            print >>sys.stderr, 'Looks as if you need to update it first.'
            sys.exit(1)

        if not self.todo:
            self.todo = self.filenamelist_unvers + self.filenamelist

        pathn = getTransActPath(self.dir)

        for filename in self.todo:
            st = self.status(filename)
            if st == 'A' or st == 'M':
                self.todo_send.append(filename)
                print statfrmt('Sending', os.path.join(pathn, filename))
            elif st == 'D':
                self.todo_delete.append(filename)
                print statfrmt('Deleting', os.path.join(pathn, filename))

        if not self.todo_send and not self.todo_delete:
            print 'nothing to do for package %s' % self.name
            sys.exit(1)

        print 'Transmitting file data ',
        for filename in self.todo_delete:
            # do not touch local files on commit --
            # delete remotely instead
            self.delete_remote_source_file(filename)
            self.to_be_deleted.remove(filename)
        for filename in self.todo_send:
            sys.stdout.write('.')
            sys.stdout.flush()
            self.put_source_file(filename)

        # all source files are committed - now comes the log
        query = []
        query.append('cmd=commit')
        query.append('rev=upload')
        query.append('user=%s' % conf.get_apiurl_usr(self.apiurl))
        query.append('comment=%s' % quote_plus(msg))
        u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)

        f = http_POST(u)
        root = ET.parse(f).getroot()
        self.rev = int(root.get('rev'))
        print
        print 'Committed revision %s.' % self.rev

        self.update_local_filesmeta()
        self.write_deletelist()

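    # Illustrative sketch (not part of the original module): the commit above
    # finishes with a POST to a URL of this shape; project, package, user and
    # API host here are made-up values:
    #
    #   makeurl('https://api.example.org', ['source', 'home:alice', 'foo'],
    #           query=['cmd=commit', 'rev=upload', 'user=alice',
    #                  'comment=%s' % quote_plus('fix build')])
    #   ->  'https://api.example.org/source/home:alice/foo?cmd=commit&rev=upload&user=alice&comment=fix+build'
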
    def write_conflictlist(self):
        if len(self.in_conflict) == 0:
            os.unlink(os.path.join(self.storedir, '_in_conflict'))
        else:
            fname = os.path.join(self.storedir, '_in_conflict')
            f = open(fname, 'w')
            f.write('\n'.join(self.in_conflict))
            f.write('\n')
            f.close()

    def updatefile(self, n, revision):
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        mtime = self.findfilebyname(n).mtime

        get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision)
        os.utime(filename, (-1, mtime))

        shutil.copy2(filename, storefilename)

    def mergefile(self, n):
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        myfilename = os.path.join(self.dir, n + '.mine')
        upfilename = os.path.join(self.dir, n + '.r' + self.rev)
        os.rename(filename, myfilename)

        mtime = self.findfilebyname(n).mtime
        get_source_file(self.apiurl, self.prjname, self.name, n,
                        revision=self.rev, targetfilename=upfilename)
        os.utime(upfilename, (-1, mtime))

        if binary_file(myfilename) or binary_file(upfilename):
            # don't try merging
            shutil.copy2(upfilename, filename)
            shutil.copy2(upfilename, storefilename)
            self.in_conflict.append(n)
            self.write_conflictlist()
            return 'C'
        else:
            # try merging
            # diff3 OPTIONS... MINE OLDER YOURS
            merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
            # we would rather use the subprocess module, but it is not available before 2.4
            ret = os.system(merge_cmd) / 256

            # "An exit status of 0 means `diff3' was successful, 1 means some
            # conflicts were found, and 2 means trouble."
            if ret == 0:
                # merge was successful... clean up
                shutil.copy2(upfilename, storefilename)
                os.unlink(upfilename)
                os.unlink(myfilename)
                return 'G'
            elif ret == 1:
                # unsuccessful merge
                shutil.copy2(upfilename, storefilename)
                self.in_conflict.append(n)
                self.write_conflictlist()
                return 'C'
            else:
                print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
                print >>sys.stderr, 'the command line was:'
                print >>sys.stderr, merge_cmd
                sys.exit(1)

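    # Note (illustrative, not part of the original module): os.system()
    # returns the raw 16-bit wait status, so the division by 256 above
    # recovers the exit code diff3 documents (0 = merged, 1 = conflicts,
    # 2 = trouble). On POSIX the same extraction can be spelled:
    #
    #   ret = os.WEXITSTATUS(os.system(merge_cmd))
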
    def update_local_filesmeta(self, revision=None):
        """
        Update the local _files file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision))
        f = open(os.path.join(self.storedir, '_files'), 'w')
        f.write(meta)
        f.close()

    def update_datastructs(self):
        """
        Update the internal data structures if the local _files
        file has changed (e.g. update_local_filesmeta() has been
        called).
        """
        import fnmatch
        files_tree = read_filemeta(self.dir)
        files_tree_root = files_tree.getroot()

        self.rev = files_tree_root.get('rev')
        self.srcmd5 = files_tree_root.get('srcmd5')

        self.linkinfo = Linkinfo()
        self.linkinfo.read(files_tree_root.find('linkinfo'))

        self.filenamelist = []
        self.filelist = []
        for node in files_tree_root.findall('entry'):
            try:
                f = File(node.get('name'),
                         node.get('md5'),
                         int(node.get('size')),
                         int(node.get('mtime')))
            except:
                # okay, a very old version of _files, which didn't contain any metadata yet...
                f = File(node.get('name'), '', 0, 0)
            self.filelist.append(f)
            self.filenamelist.append(f.name)

        self.to_be_deleted = read_tobedeleted(self.dir)
        self.in_conflict = read_inconflict(self.dir)

        # gather unversioned files, but ignore some stuff
        self.excluded = [ i for i in os.listdir(self.dir)
                          for j in exclude_stuff
                          if fnmatch.fnmatch(i, j) ]
        self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
                                     if i not in self.excluded
                                     if i not in self.filenamelist ]

    def islink(self):
        """tells us if the package is a link (has 'linkinfo').
        A package with linkinfo is a package which links to another package.
        Returns True if the package is a link, otherwise False."""
        return self.linkinfo.islink()

    def isexpanded(self):
        """tells us if the package is a link which is expanded.
        Returns True if the package is expanded, otherwise False."""
        return self.linkinfo.isexpanded()

    def update_local_pacmeta(self):
        """
        Update the local _meta file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
        f = open(os.path.join(self.storedir, '_meta'), 'w')
        f.write(meta)
        f.close()

    def findfilebyname(self, n):
        for i in self.filelist:
            if i.name == n:
                return i

    def status(self, n):
        """
        status can be:

         file  storefile  file present  STATUS
        exists  exists     in _files

          x       x            -        'A'
          x       x            x        ' ' if digest differs: 'M'
                                            and if in conflicts file: 'C'
          x       -            -        '?'
          x       -            x        'D' and listed in _to_be_deleted
          -       x            x        '!'
          -       x            -        'D' (when file in working copy is already deleted)
          -       -            x        'F' (new in repo, but not yet in working copy)
          -       -            -        NOT DEFINED
        """

        known_by_meta = False
        exists = False
        exists_in_store = False
        if n in self.filenamelist:
            known_by_meta = True
        if os.path.exists(os.path.join(self.absdir, n)):
            exists = True
        if os.path.exists(os.path.join(self.storedir, n)):
            exists_in_store = True

        if exists and not exists_in_store and known_by_meta:
            state = 'D'
        elif n in self.to_be_deleted:
            state = 'D'
        elif n in self.in_conflict:
            state = 'C'
        elif exists and exists_in_store and known_by_meta:
            if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
                state = 'M'
            else:
                state = ' '
        elif exists and not exists_in_store and not known_by_meta:
            state = '?'
        elif exists and exists_in_store and not known_by_meta:
            state = 'A'
        elif not exists and exists_in_store and known_by_meta:
            state = '!'
        elif not exists and not exists_in_store and known_by_meta:
            state = 'F'
        elif not exists and exists_in_store and not known_by_meta:
            state = 'D'
        elif not exists and not exists_in_store and not known_by_meta:
            print >>sys.stderr, '%s: not exists and not exists_in_store and not known_by_meta' % n
            print >>sys.stderr, 'this code path should never be reached!'
            sys.exit(1)

        return state

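    # Illustrative sketch (not part of the original module): a minimal
    # 'osc status'-like walk over a working copy, assuming '.' is an osc
    # package checkout:
    #
    #   p = Package('.')
    #   for filename in p.filenamelist + p.filenamelist_unvers:
    #       state = p.status(filename)
    #       if state != ' ':
    #           print statfrmt(state, filename)
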
    def comparePac(self, pac):
        """
        This method compares the local filelist with
        the filelist of the passed package to see which files
        were added, removed and changed.
        """

        changed_files = []
        added_files = []
        removed_files = []

        for file in self.filenamelist:
            if not file in self.to_be_deleted:
                if file in pac.filenamelist:
                    if dgst(file) != pac.findfilebyname(file).md5:
                        changed_files.append(file)
                else:
                    added_files.append(file)

        for file in pac.filenamelist:
            if (not file in self.filenamelist) or (file in self.to_be_deleted):
                removed_files.append(file)

        return changed_files, added_files, removed_files

    def merge(self, otherpac):
        self.todo += otherpac.todo

    def __str__(self):
        r = """
name: %s
prjname: %s
workingdir: %s
localfilelist: %s
linkinfo: %s
rev: %s
'todo' files: %s
""" % (self.name,
       self.prjname,
       self.dir,
       '\n '.join(self.filenamelist),
       self.linkinfo,
       self.rev,
       self.todo)

        return r

    def read_meta_from_spec(self, spec = None):
        import glob
        if spec:
            specfile = spec
        else:
            # scan for spec files
            speclist = glob.glob(os.path.join(self.dir, '*.spec'))
            if len(speclist) == 1:
                specfile = speclist[0]
            elif len(speclist) > 1:
                print 'the following specfiles were found:'
                for file in speclist:
                    print file
                print 'please specify one with --specfile'
                sys.exit(1)
            else:
                print 'no specfile was found - please specify one ' \
                      'with --specfile'
                sys.exit(1)

        data = read_meta_from_spec(specfile, 'Summary:', '%description')
        self.summary = data['Summary:']
        self.descr = data['%description']

    def update_package_meta(self):
        """
        for the updatepacmetafromspec subcommand
        """

        import tempfile
        (fd, filename) = tempfile.mkstemp(prefix = 'osc_editmeta.', suffix = '.xml', dir = '/tmp')

        m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))

        f = os.fdopen(fd, 'w')
        f.write(m)
        f.close()

        tree = ET.parse(filename)
        tree.find('title').text = self.summary
        tree.find('description').text = ''.join(self.descr)
        tree.write(filename)

        print '*' * 36, 'old', '*' * 36
        print m
        print '*' * 36, 'new', '*' * 36
        tree.write(sys.stdout)
        print '*' * 72

        # FIXME: for testing...
        # open the new description in $EDITOR instead?
        repl = raw_input('Write? (y/N) ')
        if repl == 'y':
            print 'Sending meta data...',
            u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
            http_PUT(u, file=filename)
            print 'Done.'
        else:
            print 'discarding', filename

        os.unlink(filename)

    def update(self, rev = None):
        # save filelist and (modified) status before replacing the meta file
        saved_filenames = self.filenamelist
        saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]

        oldp = self
        self.update_local_filesmeta(rev)
        self = Package(self.dir)

        # which files do no longer exist upstream?
        disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]

        pathn = getTransActPath(self.dir)

        for filename in saved_filenames:
            if filename in disappeared:
                print statfrmt('D', os.path.join(pathn, filename))
                # keep file if it has local modifications
                if oldp.status(filename) == ' ':
                    self.delete_localfile(filename)
                    self.delete_storefile(filename)
                continue

        for filename in self.filenamelist:

            state = self.status(filename)
            if state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
                # no merge necessary... local file is changed, but upstream isn't
                pass
            elif state == 'M' and filename in saved_modifiedfiles:
                status_after_merge = self.mergefile(filename)
                print statfrmt(status_after_merge, os.path.join(pathn, filename))
            elif state == 'M':
                self.updatefile(filename, rev)
                print statfrmt('U', os.path.join(pathn, filename))
            elif state == '!':
                self.updatefile(filename, rev)
                print 'Restored \'%s\'' % os.path.join(pathn, filename)
            elif state == 'F':
                self.updatefile(filename, rev)
                print statfrmt('A', os.path.join(pathn, filename))
            elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
                self.updatefile(filename, rev)
                self.delete_storefile(filename)
                print statfrmt('U', os.path.join(pathn, filename))
            elif state == ' ':
                pass

        self.update_local_pacmeta()

        print 'At revision %s.' % self.rev

class RequestState:
    """for objects to represent the "state" of a request"""
    def __init__(self, name=None, who=None, when=None):
        self.name = name
        self.who = who
        self.when = when

class SubmitReq:
    """represent a submit request and hold its metadata
       it has methods to read in metadata from xml,
       different views, ..."""
    def __init__(self):
        self.reqid = None
        self.state = RequestState()
        self.who = None
        self.when = None
        self.last_author = None
        self.src_project = None
        self.src_package = None
        self.src_md5 = None
        self.dst_project = None
        self.dst_package = None
        self.descr = None
        self.statehistory = []

    def read(self, root):
        self.reqid = root.get('id')

        n = root.find('submit').find('source')
        self.src_project = n.get('project')
        self.src_package = n.get('package')
        try: self.src_md5 = n.get('rev')
        except: pass

        n = root.find('submit').find('target')
        self.dst_project = n.get('project')
        self.dst_package = n.get('package')

        # read the state
        n = root.find('state')
        self.state.name, self.state.who, self.state.when \
                = n.get('name'), n.get('who'), n.get('when')

        # read the state history
        for h in root.findall('history'):
            s = RequestState()
            s.name = h.get('name')
            s.who = h.get('who')
            s.when = h.get('when')
            self.statehistory.append(s)
        self.statehistory.reverse()

        # read a description, if it exists
        try:
            n = root.find('description').text
            self.descr = n
        except:
            pass

    def list_view(self):
        return '%s %-8s %s/%s -> %s/%s %s' % \
                (self.reqid,
                 self.state.name,
                 self.src_project,
                 self.src_package,
                 self.dst_project,
                 self.dst_package,
                 repr(self.descr) or '')

    def __str__(self):
        s = """\
Request to submit (id %s):

%s/%s -> %s/%s

Source revision MD5:
%s

Message:
%s

State: %-10s %s %s
""" % (self.reqid,
       self.src_project,
       self.src_package,
       self.dst_project,
       self.dst_package,
       self.src_md5 or 'not given',
       repr(self.descr) or '',
       self.state.name,
       self.state.when, self.state.who)

        if len(self.statehistory):
            histitems = [ '%-10s %s %s' \
                    % (i.name, i.when, i.who) \
                    for i in self.statehistory ]
            s += 'History: ' + '\n '.join(histitems)

        s += '\n'
        return s

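# Illustrative sketch (not part of the original module): a submit request is
# normally read from the server's XML. The document in the usage comment is a
# made-up, minimal example of the shape SubmitReq.read() expects.
def _example_parse_submitreq(xml_text):
    req = SubmitReq()
    req.read(ET.fromstring(xml_text))
    return req

# req = _example_parse_submitreq(
#     '<request id="42">'
#     '<submit>'
#     '<source project="home:alice" package="foo" rev="d41d8cd98f00b204e9800998ecf8427e"/>'
#     '<target project="openSUSE:Factory" package="foo"/>'
#     '</submit>'
#     '<state name="new" who="alice" when="2008-03-20T20:17:40"/>'
#     '<description>please review</description>'
#     '</request>')
# print req.list_view()
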
def shorttime(t):
    """format time as Apr 02 18:19
    or Apr 02 2005
    depending on whether it is in the current year
    """
    import time

    if time.localtime()[0] == time.localtime(t)[0]:
        # same year
        return time.strftime('%b %d %H:%M',time.localtime(t))
    else:
        return time.strftime('%b %d %Y',time.localtime(t))

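# Illustrative examples (not part of the original module): a timestamp from a
# past year renders with its year, one from the current year with hour and
# minute:
#
#   shorttime(0)           ->  'Jan 01 1970'    (on a UTC machine)
#   shorttime(time.time()) ->  e.g. 'Mar 31 16:42'
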
def is_project_dir(d):
    return os.path.exists(os.path.join(d, store, '_project')) and not \
           os.path.exists(os.path.join(d, store, '_package'))


def is_package_dir(d):
    return os.path.exists(os.path.join(d, store, '_project')) and \
           os.path.exists(os.path.join(d, store, '_package'))

def slash_split(l):
    """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
    This is handy to allow copy/paste a project/package combination in this form.
    """
    r = []
    for i in l:
        r += i.split('/')
    return r

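# For instance:
#   slash_split(['openSUSE:Factory/osc'])  ->  ['openSUSE:Factory', 'osc']
#   slash_split(['prj/pkg', 'otherpkg'])   ->  ['prj', 'pkg', 'otherpkg']
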
def findpacs(files):
    pacs = []
    for f in files:
        p = filedir_to_pac(f)
        known = None
        for i in pacs:
            if i.name == p.name:
                known = i
                break
        if known:
            known.merge(p)
        else:
            pacs.append(p)
    return pacs


def read_filemeta(dir):
    return ET.parse(os.path.join(dir, store, '_files'))


def read_tobedeleted(dir):
    r = []
    fname = os.path.join(dir, store, '_to_be_deleted')

    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]

    return r


def read_inconflict(dir):
    r = []
    fname = os.path.join(dir, store, '_in_conflict')

    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]

    return r

def parseargs(list_of_args):
    if list_of_args:
        return list(list_of_args)
    else:
        return [ os.curdir ]


def filedir_to_pac(f):

    if os.path.isdir(f):
        wd = f
        p = Package(wd)

    else:
        wd = os.path.dirname(f)
        if wd == '':
            wd = os.curdir
        p = Package(wd)
        p.todo = [ os.path.basename(f) ]

    return p


def statfrmt(statusletter, filename):
    return '%s %s' % (statusletter, filename)


def pathjoin(a, *p):
    """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
    path = os.path.join(a, *p)
    if path.startswith('./'):
        path = path[2:]
    return path

def makeurl(baseurl, l, query=[]):
    """Given a list of path components, construct a complete URL.

    Optional parameters for a query string can be given as a list, as a
    dictionary, or as an already assembled string.
    In case of a dictionary, the parameters will be urlencoded by this
    function. In case of a list not -- this is to be backwards compatible.
    """

    #print 'makeurl:', baseurl, l, query

    if type(query) == type(list()):
        query = '&'.join(query)
    elif type(query) == type(dict()):
        query = urlencode(query)

    scheme, netloc = urlsplit(baseurl)[0:2]
    return urlunsplit((scheme, netloc, '/'.join(l), query, ''))

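# Illustrative examples (not part of the original module); the API host is a
# made-up value. All three query forms produce the same URL:
#
#   makeurl('https://api.example.org', ['source', 'prj', 'pkg'], query=['rev=upload'])
#   makeurl('https://api.example.org', ['source', 'prj', 'pkg'], query={'rev': 'upload'})
#   makeurl('https://api.example.org', ['source', 'prj', 'pkg'], query='rev=upload')
#
#   ->  'https://api.example.org/source/prj/pkg?rev=upload'
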


def http_request(method, url, headers={}, data=None, file=None):
    """wrapper around urllib2.urlopen for error handling,
    and to support additional (PUT, DELETE) methods"""

    filefd = None

    if conf.config['http_debug']:
        print
        print
        print '--', method, url

    if method == 'POST' and not file and not data:
        # adding data to an urllib2 request transforms it into a POST
        data = ''

    req = urllib2.Request(url)
    req.get_method = lambda: method

    # POST requests are application/x-www-form-urlencoded per default
    # since we change the request into PUT, we also need to adjust the content type header
    if method == 'PUT':
        req.add_header('Content-Type', 'application/octet-stream')

    if type(headers) == type({}):
        for i in headers.keys():
            req.add_header(i, headers[i])

    if file and not data:
        size = os.path.getsize(file)
        if size < 1024*512:
            data = open(file).read()
        else:
            import mmap
            filefd = open(file, 'r+')
            try:
                data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
            except EnvironmentError, e:
                if e.errno == 19:
                    sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
                             '\non a filesystem which does not support this.' % (e, file))
                else:
                    raise

    fd = urllib2.urlopen(req, data=data)

    if hasattr(conf.cookiejar, 'save'):
        conf.cookiejar.save(ignore_discard=True)

    if filefd: filefd.close()

    # this code is for debugging empty responses from api.opensuse.org
    # https://bugzilla.novell.com/show_bug.cgi?id=369176
    # Buildservice server sometimes sends broken replies
    # only prjconf requests (_config) can have empty replies (we hope)
    if False and fd.headers['Content-Length'] == '0' and not fd.url.endswith('/_config'):
        print 'DEBUG INFO'
        print
        import time; print time.ctime()
        print url
        print
        print 'Request Headers:'
        for i in req.header_items():
            print i
        print
        print 'Reply:'
        try:
            print fd.code, fd.msg
        except:
            print 'could not print fd.code, fd.msg'
        try:
            print fd.url
        except:
            print 'could not print fd.url'
        try:
            print fd.headers
        except:
            print 'could not print fd.headers'

        print
        print 'An empty reply was received. This is a bug...'
        print 'Please go to https://bugzilla.novell.com/show_bug.cgi?id=369176 and add the above info.'
        print 'Thanks!'
        print
        sys.exit(1)

    return fd


def http_GET(*args, **kwargs):    return http_request('GET', *args, **kwargs)
def http_POST(*args, **kwargs):   return http_request('POST', *args, **kwargs)
def http_PUT(*args, **kwargs):    return http_request('PUT', *args, **kwargs)
def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
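
# A minimal sketch of how these wrappers are typically combined with
# makeurl() (the project and package names are made up):
#
#   u = makeurl(conf.config['apiurl'], ['source', 'home:user', 'mypkg', '_meta'])
#   f = http_GET(u)            # file-like object
#   meta = f.read()
#   http_PUT(u, data=meta)     # write the metadata back unchanged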


# obsolete!
def urlopen(url, data=None):
    """wrapper around urllib2.urlopen for error handling"""

    print 'core.urlopen() is deprecated -- use http_GET et al.'

    try:
        # adding data to the request makes it a POST
        if not data:
            fd = http_GET(url)
        else:
            fd = http_POST(url, data=data)

    except urllib2.HTTPError, e:
        print >>sys.stderr, 'Error: can\'t get \'%s\'' % url
        print >>sys.stderr, e
        if e.code == 500:
            print >>sys.stderr, '\nDebugging output follows.\nurl:\n%s\nresponse:\n%s' % (url, e.read())
        sys.exit(1)

    return fd


def init_project_dir(apiurl, dir, project):
    if not os.path.exists(dir):
        os.mkdir(dir)
    os.mkdir(os.path.join(dir, store))

    store_write_project(dir, project)
    store_write_apiurl(dir, apiurl)
    if conf.config['do_package_tracking']:
        store_write_initial_packages(dir, project, [])


def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
    if not os.path.isdir(store):
        os.mkdir(store)
    os.chdir(store)
    f = open('_project', 'w')
    f.write(project + '\n')
    f.close()
    f = open('_package', 'w')
    f.write(package + '\n')
    f.close()

    if files:
        f = open('_files', 'w')
        f.write(''.join(show_files_meta(apiurl, project, package, revision)))
        f.close()
    else:
        # create dummy
        ET.ElementTree(element=ET.Element('directory')).write('_files')

    f = open('_osclib_version', 'w')
    f.write(__version__ + '\n')
    f.close()

    store_write_apiurl(os.path.pardir, apiurl)

    os.chdir(os.pardir)
    return


def check_store_version(dir):
    versionfile = os.path.join(dir, store, '_osclib_version')
    try:
        v = open(versionfile).read().strip()
    except:
        v = ''

    if v == '':
        print >>sys.stderr, 'error: "%s" is not an osc working copy' % dir
        sys.exit(1)

    if v != __version__:
        if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98']:
            # version is fine, no migration needed
            f = open(versionfile, 'w')
            f.write(__version__ + '\n')
            f.close()
            return
        print >>sys.stderr
        print >>sys.stderr, 'the osc metadata of your working copy "%s"' % dir
        print >>sys.stderr, 'has the wrong version (%s), should be %s' % (v, __version__)
        print >>sys.stderr, 'please do a fresh checkout'
        print >>sys.stderr
        sys.exit(1)


def meta_get_packagelist(apiurl, prj):

    u = makeurl(apiurl, ['source', prj])
    f = http_GET(u)
    root = ET.parse(f).getroot()
    return [ node.get('name') for node in root.findall('entry') ]


def meta_get_filelist(apiurl, prj, package, verbose=False):
    """return a list of file names,
    or a list of File() instances if verbose=True"""

    u = makeurl(apiurl, ['source', prj, package])
    f = http_GET(u)
    root = ET.parse(f).getroot()

    if not verbose:
        return [ node.get('name') for node in root.findall('entry') ]
    else:
        l = []
        rev = int(root.get('rev'))
        for node in root.findall('entry'):
            f = File(node.get('name'),
                     node.get('md5'),
                     int(node.get('size')),
                     int(node.get('mtime')))
            f.rev = rev
            l.append(f)
        return l
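
# Sketch of the verbose mode ('myprj'/'mypkg' are placeholders, and it is
# assumed here that the File class exposes its constructor arguments as
# name/md5/size/mtime attributes):
#
#   for f in meta_get_filelist(conf.config['apiurl'], 'myprj', 'mypkg',
#                              verbose=True):
#       print f.name, f.md5, f.size, f.mtime, f.rev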


def meta_get_project_list(apiurl):
    u = makeurl(apiurl, ['source'])
    f = http_GET(u)
    root = ET.parse(f).getroot()
    return sorted([ node.get('name') for node in root ])


def show_project_meta(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_meta'])
    f = http_GET(url)
    return f.readlines()


def show_project_conf(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_config'])
    f = http_GET(url)
    return f.readlines()


def show_package_meta(apiurl, prj, pac):
    try:
        url = makeurl(apiurl, ['source', prj, pac, '_meta'])
        f = http_GET(url)
    except urllib2.HTTPError, e:
        print >>sys.stderr, 'error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
        print >>sys.stderr, e
        if e.code == 500:
            print >>sys.stderr, '\nDebugging output follows.\nurl:\n%s\nresponse:\n%s' % (url, e.read())
        sys.exit(1)
    return f.readlines()


def show_pattern_metalist(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_pattern'])
    f = http_GET(url)
    tree = ET.parse(f)
    r = [ node.get('name') for node in tree.getroot() ]
    r.sort()
    return r


def show_pattern_meta(apiurl, prj, pattern):
    url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
    try:
        f = http_GET(url)
    except urllib2.HTTPError, e:
        print >>sys.stderr, 'error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
        print >>sys.stderr, e
        sys.exit(1)
    return f.readlines()


class metafile:
    """metafile that can be manipulated and is stored back after manipulation."""
    def __init__(self, url, input, change_is_required=False):
        import tempfile

        self.url = url
        self.change_is_required = change_is_required

        (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = '.xml', dir = '/tmp')

        f = os.fdopen(fd, 'w')
        f.write(''.join(input))
        f.close()

        self.hash_orig = dgst(self.filename)

    def sync(self):
        hash = dgst(self.filename)
        if self.change_is_required == True and hash == self.hash_orig:
            print 'File unchanged. Not saving.'
            os.unlink(self.filename)
            return True

        try:
            print 'Sending meta data...'
            http_PUT(self.url, file=self.filename)
            os.unlink(self.filename)
            print 'Done.'
            return True
        except urllib2.HTTPError, e:
            # internal server error (probably the xml file is incorrect)
            if e.code == 400:
                print >>sys.stderr, 'Cannot save meta data.'
                print >>sys.stderr, e
                print >>sys.stderr, e.read()
                return False
            if e.code == 500:
                print >>sys.stderr, 'Cannot save meta data. Unknown error.'
                print >>sys.stderr, e
                # this may be unhelpful... because it may just print a big blob of
                # uninteresting ichain html and javascript... however it could
                # potentially be useful if the origin server returns an information body
                if conf.config['http_debug']:
                    print >>sys.stderr, e.read()
                return False
            else:
                print >> sys.stderr, 'cannot save meta data - an unexpected error occurred'
                return False


# different types of metadata
metatypes = { 'prj':     { 'path': 'source/%s/_meta',
                           'template': new_project_templ,
                         },
              'pkg':     { 'path': 'source/%s/%s/_meta',
                           'template': new_package_templ,
                         },
              'prjconf': { 'path': 'source/%s/_config',
                           'template': '',
                         },
              'user':    { 'path': 'person/%s',
                           'template': new_user_template,
                         },
              'pattern': { 'path': 'source/%s/_pattern/%s',
                           'template': new_pattern_template,
                         },
            }


def meta_exists(metatype,
                path_args=None,
                template_args=None,
                create_new=True,
                apiurl=None):

    data = None
    if not apiurl:
        apiurl = conf.config['apiurl']
    url = make_meta_url(metatype, path_args, apiurl)
    try:
        data = http_GET(url).readlines()
    except urllib2.HTTPError, e:
        if e.code == 404:
            if create_new:
                data = metatypes[metatype]['template']
                if template_args:
                    data = data % template_args
        else:
            print >>sys.stderr, 'error getting metadata for type \'%s\' at URL \'%s\':' \
                                % (metatype, url)
            print >>sys.stderr, e
    return data


def make_meta_url(metatype, path_args=None, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    if metatype not in metatypes.keys():
        sys.exit('unknown metatype %s' % metatype)
    path = metatypes[metatype]['path']

    if path_args:
        path = path % path_args

    return makeurl(apiurl, [path])
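
# Example (sketch; project/package names are placeholders): path_args has to
# fill the %s slots of the chosen metatype's path template, so a package
# needs a (project, package) tuple while a prjconf takes a single string:
#
#   make_meta_url('pkg', path_args=('myprj', 'mypkg'))
#   # -> <apiurl>/source/myprj/mypkg/_meta
#   make_meta_url('prjconf', path_args='myprj')
#   # -> <apiurl>/source/myprj/_config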


def edit_meta(metatype,
              path_args=None,
              data=None,
              template_args=None,
              edit=False,
              change_is_required=False,
              apiurl=None):

    if not apiurl:
        apiurl = conf.config['apiurl']
    if not data:
        data = meta_exists(metatype,
                           path_args,
                           template_args,
                           create_new=True,
                           apiurl=apiurl)

    if edit:
        change_is_required = True

    url = make_meta_url(metatype, path_args, apiurl)
    f = metafile(url, data, change_is_required)

    if edit:
        editor = os.getenv('EDITOR', default='vim')
        while 1:
            os.system('%s %s' % (editor, f.filename))
            if change_is_required == True:
                if not f.sync():
                    input = raw_input('Try again? (yY = Yes - nN = No): ')
                    if input != 'y' and input != 'Y':
                        break
                else:
                    break
            else:
                f.sync()
                break
    else:
        f.sync()
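
# A non-interactive usage sketch ('home:user' is a placeholder): this either
# uploads the existing project metadata or creates it from new_project_templ,
# filling the template's %(name)s and %(user)s slots from template_args:
#
#   edit_meta('prj',
#             path_args=quote_plus('home:user'),
#             template_args={'name': 'home:user', 'user': conf.config['user']})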


def show_files_meta(apiurl, prj, pac, revision=None):
    query = []
    if revision:
        query.append('rev=%s' % revision)
    f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
    return f.readlines()


def show_upstream_srcmd5(apiurl, prj, pac):
    m = show_files_meta(apiurl, prj, pac)
    return ET.parse(StringIO(''.join(m))).getroot().get('srcmd5')


def show_upstream_xsrcmd5(apiurl, prj, pac):
    m = show_files_meta(apiurl, prj, pac)
    try:
        # only source link packages have a <linkinfo> element.
        return ET.parse(StringIO(''.join(m))).getroot().find('linkinfo').get('xsrcmd5')
    except:
        return None


def show_upstream_rev(apiurl, prj, pac):
    m = show_files_meta(apiurl, prj, pac)
    return ET.parse(StringIO(''.join(m))).getroot().get('rev')


def read_meta_from_spec(specfile, *args):
    """
    Read tags and sections from spec file. To read out
    a tag the passed argument must end with a colon. To
    read out a section the passed argument must start with
    a '%'.
    This method returns a dictionary which contains the
    requested data.
    """
    import codecs, locale

    if not os.path.isfile(specfile):
        print 'file \'%s\' is not a readable file' % specfile
        sys.exit(1)

    try:
        lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
    except UnicodeDecodeError:
        lines = open(specfile).readlines()

    tags = []
    sections = []
    spec_data = {}

    for itm in args:
        if itm.endswith(':'):
            tags.append(itm)
        elif itm.startswith('%'):
            sections.append(itm)
        else:
            print >>sys.stderr, 'error - \'%s\' is neither a tag nor a section' % itm
            sys.exit(1)

    for tag in tags:
        for line in lines:
            if line.startswith(tag):
                spec_data[tag] = line.split(':')[1].strip()
                break
        if not spec_data.has_key(tag):
            print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
            sys.exit(1)

    for section in sections:
        try:
            start = lines.index(section + '\n') + 1
        except ValueError:
            print >>sys.stderr, 'error - section \'%s\' does not exist' % section
            sys.exit(1)
        data = []
        for line in lines[start:]:
            if line.startswith('%'):
                break
            data.append(line)
        spec_data[section] = data

    return spec_data
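
# Usage sketch ('mypkg.spec' is a placeholder): tags and sections can be
# requested in one call; tags come back as stripped strings, sections as
# lists of raw lines:
#
#   d = read_meta_from_spec('mypkg.spec', 'Summary:', '%description')
#   summary = d['Summary:']
#   description = ''.join(d['%description'])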


def create_submit_request(apiurl,
                          src_project, src_package,
                          dst_project, dst_package,
                          message, orev=None):

    import cgi

    r = SubmitReq()
    r.src_project = src_project
    r.src_package = src_package
    r.src_md5 = orev or show_upstream_srcmd5(apiurl, src_project, src_package)
    r.dst_project = dst_project
    r.dst_package = dst_package
    r.descr = cgi.escape(message or '')

    xml = """\
<request type="submit">
  <submit>
    <source project="%s" package="%s" rev="%s"/>
    <target project="%s" package="%s" />
  </submit>
  <state name="new"/>
  <description>%s</description>
</request>
""" % (r.src_project,
       r.src_package,
       r.src_md5,
       r.dst_project,
       r.dst_package,
       r.descr)

    u = makeurl(apiurl, ['request'], query=['cmd=create'])
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')
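
# Usage sketch (all names are placeholders): the returned string is the id
# assigned by the server, which get_submit_request() below can resolve again:
#
#   reqid = create_submit_request(conf.config['apiurl'],
#                                 'home:user', 'mypkg',
#                                 'openSUSE:Factory', 'mypkg',
#                                 'update to version 1.2')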


def get_submit_request(apiurl, reqid):
    u = makeurl(apiurl, ['request', reqid])
    f = http_GET(u)
    root = ET.parse(f).getroot()

    r = SubmitReq()
    r.read(root)
    return r


def change_submit_request_state(apiurl, reqid, newstate, message=''):
    u = makeurl(apiurl,
                ['request', reqid],
                query=['cmd=changestate', 'newstate=%s' % newstate])
    f = http_POST(u, data=message)
    return f.read()


def get_submit_request_list(apiurl, project, package):
    match = 'submit/target/@project=\'%s\'' % quote_plus(project)
    if package:
        match += '%20and%20' + 'submit/target/@package=\'%s\'' % quote_plus(package)

    u = makeurl(apiurl, ['search', 'request'], ['match=%s' % match])
    f = http_GET(u)
    collection = ET.parse(f).getroot()

    requests = []
    for root in collection.findall('request'):
        r = SubmitReq()
        r.read(root)
        if r.state.name not in ['accepted', 'declined', 'deleted']:
            requests.append(r)

    return requests


def get_user_meta(apiurl, user):
    u = makeurl(apiurl, ['person', quote_plus(user)])
    try:
        f = http_GET(u)
        return ''.join(f.readlines())
    except urllib2.HTTPError:
        print 'user \'%s\' not found' % user
        return None


def get_user_data(apiurl, user, *tags):
    """get specified tags from the user meta"""
    meta = get_user_meta(apiurl, user)
    data = []
    if meta != None:
        root = ET.fromstring(meta)
        for tag in tags:
            try:
                if root.find(tag).text != None:
                    data.append(root.find(tag).text)
                else:
                    # tag is empty
                    data.append('-')
            except AttributeError:
                # this part is reached if the tags tuple contains an invalid tag
                print 'The xml file for user \'%s\' seems to be broken' % user
                return None
        return data
    else:
        return None


def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None):
    query = []
    if revision:
        query.append('rev=%s' % quote_plus(revision))

    u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
    # print 'url: %s' % u
    f = http_GET(u)

    o = open(targetfilename or filename, 'w')
    while 1:
        buf = f.read(BUFSIZE)
        if not buf: break
        o.write(buf)
    o.close()


def get_binary_file(apiurl, prj, repo, arch,
                    filename, targetfilename=None,
                    package=None,
                    progress_meter=False):

    where = package or '_repository'
    u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])

    if progress_meter:
        sys.stdout.write("Downloading %s [  0%%]" % filename)
        sys.stdout.flush()

    f = http_GET(u)
    binsize = int(f.headers['content-length'])

    import tempfile
    (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc', dir = '/tmp')

    o = os.fdopen(fd, 'w')

    downloaded = 0
    while 1:
        #buf = f.read(BUFSIZE)
        buf = f.read(16384)
        if not buf: break
        o.write(buf)
        downloaded += len(buf)
        if progress_meter:
            completion = str(int((float(downloaded)/binsize)*100))
            sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
            sys.stdout.flush()
    o.close()

    if progress_meter:
        sys.stdout.write('\n')

    shutil.move(tmpfilename, targetfilename or filename)


def dgst(file):

    #if not os.path.exists(file):
        #return None

    import md5
    s = md5.new()
    f = open(file, 'r')
    while 1:
        buf = f.read(BUFSIZE)
        if not buf: break
        s.update(buf)
    f.close()
    return s.hexdigest()


def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    if s and '\0' in s[:4096]:
        return True
    return False


def binary_file(fn):
    """read 4096 bytes from a file named fn, and call binary() on the data"""
    return binary(open(fn, 'r').read(4096))


def get_source_file_diff(dir, filename, rev, oldfilename=None, olddir=None, origfilename=None):
    """
    This method diffs oldfilename against filename (so filename will
    be shown as the new file).
    The variable origfilename is used if filename and oldfilename differ
    in their names (for instance if a tempfile is used for filename etc.)
    """

    import difflib

    if not oldfilename:
        oldfilename = filename

    if not olddir:
        olddir = os.path.join(dir, store)

    if not origfilename:
        origfilename = filename

    file1 = os.path.join(olddir, oldfilename)   # old/stored original
    file2 = os.path.join(dir, filename)         # working copy

    f1 = open(file1, 'r')
    s1 = f1.read()
    f1.close()

    f2 = open(file2, 'r')
    s2 = f2.read()
    f2.close()

    if binary(s1) or binary(s2):
        d = ['Binary file %s has changed\n' % origfilename]
    else:
        d = difflib.unified_diff(
            s1.splitlines(1),
            s2.splitlines(1),
            fromfile = '%s     (revision %s)' % (origfilename, rev),
            tofile = '%s     (working copy)' % origfilename)

    # if file doesn't end with newline, we need to append one in the diff result
    d = list(d)
    for i, line in enumerate(d):
        if not line.endswith('\n'):
            d[i] += '\n\\ No newline at end of file'
            if i+1 != len(d):
                d[i] += '\n'

    return ''.join(d)


def make_diff(wc, revision):
    import tempfile
    changed_files = []
    added_files = []
    removed_files = []
    cmp_pac = None
    diff_hdr = 'Index: %s\n'
    diff_hdr += '===================================================================\n'
    diff = []
    if not revision:
        # normal diff
        if wc.todo:
            for file in wc.todo:
                if file in wc.filenamelist+wc.filenamelist_unvers:
                    state = wc.status(file)
                    if state == 'A':
                        added_files.append(file)
                    elif state == 'D':
                        removed_files.append(file)
                    elif state == 'M' or state == 'C':
                        changed_files.append(file)
                else:
                    diff.append('osc: \'%s\' is not under version control' % file)
        else:
            for file in wc.filenamelist+wc.filenamelist_unvers:
                state = wc.status(file)
                if state == 'M' or state == 'C':
                    changed_files.append(file)
                elif state == 'A':
                    added_files.append(file)
                elif state == 'D':
                    removed_files.append(file)
    else:
        olddir = os.getcwd()
        tmpdir = tempfile.mkdtemp(revision, wc.name, '/tmp')
        os.chdir(tmpdir)
        init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
        cmp_pac = Package(tmpdir)
        if wc.todo:
            for file in wc.todo:
                if file in cmp_pac.filenamelist:
                    if file in wc.filenamelist:
                        changed_files.append(file)
                    else:
                        diff.append('osc: \'%s\' is not under version control' % file)
                else:
                    diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
        else:
            for file in wc.filenamelist+wc.filenamelist_unvers:
                state = wc.status(file)
                if state == 'A' and (not file in cmp_pac.filenamelist):
                    added_files.append(file)
                elif file in cmp_pac.filenamelist and state == 'D':
                    removed_files.append(file)
                elif state == ' ' and not file in cmp_pac.filenamelist:
                    added_files.append(file)
                elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
                    if dgst(os.path.join(wc.absdir, file)) != cmp_pac.findfilebyname(file).md5:
                        changed_files.append(file)
            for file in cmp_pac.filenamelist:
                if not file in wc.filenamelist:
                    removed_files.append(file)
            removed_files = set(removed_files)

    for file in changed_files:
        diff.append(diff_hdr % file)
        if cmp_pac == None:
            diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
        else:
            cmp_pac.updatefile(file, revision)
            diff.append(get_source_file_diff(wc.absdir, file, revision, file,
                                             cmp_pac.absdir, file))
    (fd, tmpfile) = tempfile.mkstemp(dir='/tmp')
    for file in added_files:
        diff.append(diff_hdr % file)
        if cmp_pac == None:
            diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
                                             os.path.dirname(tmpfile), file))
        else:
            diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
                                             os.path.dirname(tmpfile), file))

    # FIXME: this is ugly but it cannot be avoided atm
    # if a file is deleted via "osc rm file" we should keep the storefile.
    tmp_pac = None
    if cmp_pac == None:
        olddir = os.getcwd()
        tmpdir = tempfile.mkdtemp(dir='/tmp')
        os.chdir(tmpdir)
        init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
        tmp_pac = Package(tmpdir)
        os.chdir(olddir)

    for file in removed_files:
        diff.append(diff_hdr % file)
        if cmp_pac == None:
            tmp_pac.updatefile(file, tmp_pac.rev)
            diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                             wc.rev, file, tmp_pac.storedir, file))
        else:
            cmp_pac.updatefile(file, revision)
            diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                             revision, file, cmp_pac.storedir, file))

    os.chdir(olddir)
    if cmp_pac != None:
        delete_tmpdir(cmp_pac.absdir)
    if tmp_pac != None:
        delete_tmpdir(tmp_pac.absdir)
    return diff


def pretty_diff(apiurl,
                old_project, old_package, old_revision,
                new_project, new_package, new_revision):

    query = {'cmd': 'diff'}
    if old_project:
        query['oproject'] = old_project
    if old_package:
        query['opackage'] = old_package
    if old_revision:
        query['orev'] = old_revision
    if new_revision:
        query['rev'] = new_revision

    u = makeurl(apiurl, ['source', new_project, new_package], query=query)

    f = http_POST(u)
    return f.read()
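
# Sketch of a server-side diff between two revisions of one package (the
# names and revision numbers are placeholders):
#
#   print pretty_diff(conf.config['apiurl'],
#                     'myprj', 'mypkg', '1',
#                     'myprj', 'mypkg', '2')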


def make_dir(apiurl, project, package, pathname):
    #print "creating directory '%s'" % project
    if not os.path.exists(project):
        print statfrmt('A', project)
        init_project_dir(apiurl, project, project)

    #print "creating directory '%s/%s'" % (project, package)
    if not pathname:
        pathname = os.path.join(project, package)
    if not os.path.exists(os.path.join(project, package)):
        print statfrmt('A', pathname)
        os.mkdir(os.path.join(project, package))
        os.mkdir(os.path.join(project, package, store))

    return os.path.join(project, package)


def checkout_package(apiurl, project, package,
                     revision=None, pathname=None, prj_obj=None,
                     expand_link=False):
    olddir = os.getcwd()

    if not pathname:
        pathname = os.path.join(project, package)

    path = (quote_plus(project), quote_plus(package))
    if meta_exists(metatype='pkg', path_args=path, create_new=False, apiurl=apiurl) == None:
        print >>sys.stderr, 'error 404 - project or package does not exist'
        sys.exit(1)

    if expand_link:
        # try to read from the linkinfo
        x = show_upstream_xsrcmd5(apiurl, project, package)
        if x:
            # it is a link - thus, we use the xsrcmd5 as the revision to be
            # checked out
            revision = x

    os.chdir(make_dir(apiurl, project, package, pathname))
    init_package_dir(apiurl, project, package, store, revision)
    os.chdir(os.pardir)
    p = Package(package)

    for filename in p.filenamelist:
        p.updatefile(filename, revision)
        #print 'A   ', os.path.join(project, package, filename)
        print statfrmt('A', os.path.join(pathname, filename))
    if conf.config['do_package_tracking']:
        # check if we can re-use an existing project object
        if prj_obj == None:
            prj_obj = Project(os.getcwd())
        prj_obj.set_state(p.name, ' ')
        prj_obj.write_packages()
    os.chdir(olddir)


def replace_pkg_meta(pkgmeta, new_name, new_prj):
    """
    update pkgmeta with new_name and new_prj and set the calling user as the
    only maintainer
    """
    root = ET.fromstring(''.join(pkgmeta))
    root.set('name', new_name)
    root.set('project', new_prj)
    for person in root.findall('person'):
        root.remove(person)
    ET.SubElement(root, 'person',
                  userid = conf.config['user'], role = 'maintainer')
    return ET.tostring(root)


def link_pac(src_project, src_package, dst_project, dst_package):
    """
    create a linked package
     - "src" is the original package
     - "dst" is the "link" package that we are creating here
    """

    src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
    src_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

    edit_meta('pkg',
              path_args=(dst_project, dst_package),
              data=src_meta)

    # create the _link file
    # but first, make sure not to overwrite an existing one
    if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
        print >>sys.stderr
        print >>sys.stderr, '_link file already exists...! Aborting'
        sys.exit(1)

    print 'Creating _link...',
    link_template = """\
<link project="%s" package="%s">
<patches>
  <!-- <apply name="patch" /> -->
  <!-- <topadd>%%define build_with_feature_x 1</topadd> -->
</patches>
</link>
""" % (src_project, src_package)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
    http_PUT(u, data=link_template)
    print 'Done.'


def aggregate_pac(src_project, src_package, dst_project, dst_package):
    """
    aggregate package
     - "src" is the original package
     - "dst" is the "aggregate" package that we are creating here
    """

    src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
    src_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

    edit_meta('pkg',
              path_args=(dst_project, dst_package),
              data=src_meta)

    # create the _aggregate file
    # but first, make sure not to overwrite an existing one
    if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
        print >>sys.stderr
        print >>sys.stderr, '_aggregate file already exists...! Aborting'
        sys.exit(1)

    print 'Creating _aggregate...',
    aggregate_template = """\
<aggregatelist>
  <aggregate project="%s">
    <package>%s</package>
  </aggregate>
</aggregatelist>
""" % (src_project, src_package)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
    http_PUT(u, data=aggregate_template)
    print 'Done.'


def copy_pac(src_apiurl, src_project, src_package,
             dst_apiurl, dst_project, dst_package,
             server_side = False):
    """
    Create a copy of a package.

    Copying can be done by downloading the files from one package and
    committing them into the other by uploading them (client-side copy) --
    or by the server, in a single api call.
    """

    src_meta = show_package_meta(src_apiurl, src_project, src_package)
    src_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

    print 'Sending meta data...'
    u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
    http_PUT(u, data=src_meta)

    print 'Copying files...'
    if server_side:
        query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
        u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
        f = http_POST(u)
        return f.read()

    else:
        # copy one file after the other
        import tempfile
        tmpdir = tempfile.mkdtemp(prefix='osc_copypac', dir='/tmp')
        os.chdir(tmpdir)
        for n in meta_get_filelist(src_apiurl, src_project, src_package):
            print '  ', n
            get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n)
            u = makeurl(dst_apiurl, ['source', dst_project, dst_package, pathname2url(n)])
            http_PUT(u, file = n)
            os.unlink(n)
        os.rmdir(tmpdir)
        return 'Done.'


def delete_package(apiurl, prj, pac):
    u = makeurl(apiurl, ['source', prj, pac])
    try:
        http_DELETE(u)
    except urllib2.HTTPError, e:
        if e.code == 404:
            print >>sys.stderr, 'Package \'%s\' does not exist' % pac
            sys.exit(1)
        else:
            print >>sys.stderr, 'an unexpected error occurred while deleting ' \
                                '\'%s\'' % pac
            sys.exit(1)


def delete_project(apiurl, prj):
    u = makeurl(apiurl, ['source', prj])
    try:
        http_DELETE(u)
    except urllib2.HTTPError, e:
        if e.code == 404:
            print >>sys.stderr, 'Project \'%s\' does not exist' % prj
            sys.exit(1)
        else:
            print >>sys.stderr, 'an unexpected error occurred while deleting ' \
                                '\'%s\'' % prj
            sys.exit(1)


def get_platforms(apiurl):
    f = http_GET(makeurl(apiurl, ['platform']))
    tree = ET.parse(f)
    r = [ node.get('name') for node in tree.getroot() ]
    r.sort()
    return r


def get_platforms_of_project(apiurl, prj):
    f = show_project_meta(apiurl, prj)
    tree = ET.parse(StringIO(''.join(f)))

    r = [ node.get('name') for node in tree.findall('repository')]
    return r


def get_repos_of_project(apiurl, prj):
    f = show_project_meta(apiurl, prj)
    tree = ET.parse(StringIO(''.join(f)))

    repo_line_templ = '%-15s %-10s'
    for node in tree.findall('repository'):
        for node2 in node.findall('arch'):
            yield repo_line_templ % (node.get('name'), node2.text)
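
# Note that get_repos_of_project() is a generator of pre-formatted lines,
# one per repository/arch pair, e.g. (the project name is a placeholder):
#
#   for line in get_repos_of_project(conf.config['apiurl'], 'myprj'):
#       print line      # e.g. 'openSUSE_Factory x86_64'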


def get_binarylist(apiurl, prj, repo, arch, package=None):
    what = package or '_repository'
    u = makeurl(apiurl, ['build', prj, repo, arch, what])
    f = http_GET(u)
    tree = ET.parse(f)
    r = [ node.get('filename') for node in tree.findall('binary')]
    return r


def get_binarylist_published(apiurl, prj, repo, arch):
    u = makeurl(apiurl, ['published', prj, repo, arch])
    f = http_GET(u)
    tree = ET.parse(f)
    r = [ node.get('name') for node in tree.findall('entry')]
    return r


def show_results_meta(apiurl, prj, package=None):
    query = []
    if package:
        query.append('package=%s' % pathname2url(package))
    u = makeurl(apiurl, ['build', prj, '_result'], query=query)
    f = http_GET(u)
    return f.readlines()


def show_prj_results_meta(apiurl, prj):
    u = makeurl(apiurl, ['build', prj, '_result'])
    f = http_GET(u)
    return f.readlines()


def get_results(apiurl, prj, package):
    r = []
    result_line_templ = '%(rep)-15s %(arch)-10s %(status)s'

    f = show_results_meta(apiurl, prj, package=package)
    tree = ET.parse(StringIO(''.join(f)))
    root = tree.getroot()

    for node in root.findall('result'):
        rmap = {}
        rmap['prj'] = prj
        rmap['pac'] = package
        rmap['rep'] = node.get('repository')
        rmap['arch'] = node.get('arch')

        statusnode = node.find('status')
        try:
            rmap['status'] = statusnode.get('code')
        except:
            # code can be missing when package is too new:
            return {}

        if rmap['status'] in ['expansion error', 'broken']:
            rmap['status'] += ': ' + statusnode.find('details').text

        if rmap['status'] == 'failed':
            rmap['status'] += ': %s' % apiurl + \
                '/result/%(prj)s/%(rep)s/%(pac)s/%(arch)s/log' % rmap

        r.append(result_line_templ % rmap)
    return r


def get_prj_results(apiurl, prj, show_legend=False, csv=False):
    #print '----------------------------------------'

    r = []
    #result_line_templ = '%(prj)-15s %(pac)-15s %(rep)-15s %(arch)-10s %(status)s'
    result_line_templ = '%(rep)-15s %(arch)-10s %(status)s'

    f = show_prj_results_meta(apiurl, prj)
    tree = ET.parse(StringIO(''.join(f)))
    root = tree.getroot()

    pacs = []
    # sequence of (repo,arch) tuples
    targets = []
    # {package: {(repo,arch): status}}
    status = {}
    if not root.find('result'):
        return []
    for node in root.find('result'):
        pacs.append(node.get('package'))
    pacs.sort()
    for node in root.findall('result'):
        tg = (node.get('repository'), node.get('arch'))
        targets.append(tg)
        for pacnode in node.findall('status'):
            pac = pacnode.get('package')
            if pac not in status:
                status[pac] = {}
            status[pac][tg] = pacnode.get('code')
    targets.sort()

    # csv output
    if csv:
        # TODO: option to disable the table header
        row = ['_'] + ['/'.join(tg) for tg in targets]
        r.append(';'.join(row))
        for pac in pacs:
            row = [pac] + [status[pac][tg] for tg in targets]
            r.append(';'.join(row))
        return r

    # human readable output
    max_pacs = 40
    for startpac in range(0, len(pacs), max_pacs):
        offset = 0
        for pac in pacs[startpac:startpac+max_pacs]:
            r.append(' |' * offset + ' ' + pac)
            offset += 1

        for tg in targets:
            line = []
            line.append(' ')
            for pac in pacs[startpac:startpac+max_pacs]:
                if not status.has_key(pac) or not status[pac].has_key(tg):
                    # for newly added packages, status may be missing
                    st = '?'
                else:
                    try:
                        st = buildstatus_symbols[status[pac][tg]]
                    except KeyError:
                        print 'osc: warn: unknown status \'%s\'...' % status[pac][tg]
                        print 'please edit osc/core.py, and extend the buildstatus_symbols dictionary.'
                        st = '?'
                line.append(st)
                line.append(' ')
            line.append(' %s %s' % tg)
            line = ''.join(line)

            r.append(line)

        r.append('')

    if show_legend:
        r.append(' Legend:')
        for i, j in buildstatus_symbols.items():
            r.append('  %s %s' % (j, i))

    return r


def get_buildlog(apiurl, prj, package, platform, arch, offset):
    u = makeurl(apiurl, ['build', prj, platform, arch, package, '_log?nostream=1&start=%s' % offset])
    f = http_GET(u)
    return f.read()


def print_buildlog(apiurl, prj, package, platform, arch, offset = 0):
    """prints out the buildlog on stdout"""
    try:
        while True:
            log_chunk = get_buildlog(apiurl, prj, package, platform, arch, offset)
            if len(log_chunk) == 0:
                break
            offset += len(log_chunk)
            print log_chunk.strip()
    except urllib2.HTTPError, e:
        print >>sys.stderr, 'Can\'t get logfile'
        print >>sys.stderr, e
    except KeyboardInterrupt:
        pass
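
# Usage sketch (all arguments are placeholders): polls the log in chunks,
# starting at offset 0, until the server returns an empty chunk:
#
#   print_buildlog(conf.config['apiurl'], 'myprj', 'mypkg',
#                  'openSUSE_Factory', 'x86_64')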


def get_buildinfo(apiurl, prj, package, platform, arch, specfile=None, addlist=None):
    query = []
    if addlist:
        for i in addlist:
            query.append('add=%s' % quote_plus(i))

    u = makeurl(apiurl, ['build', prj, platform, arch, package, '_buildinfo'], query=query)

    if specfile:
        f = http_POST(u, data=specfile)
    else:
        f = http_GET(u)
    return f.read()


def get_buildconfig(apiurl, prj, package, platform, arch):
    u = makeurl(apiurl, ['build', prj, platform, '_buildconfig'])
    f = http_GET(u)
    return f.read()


def get_buildhistory(apiurl, prj, package, platform, arch):
    import time
    u = makeurl(apiurl, ['build', prj, platform, arch, package, '_history'])
    f = http_GET(u)
    root = ET.parse(f).getroot()

    r = []
    for node in root.findall('entry'):
        rev = int(node.get('rev'))
        srcmd5 = node.get('srcmd5')
        versrel = node.get('versrel')
        bcnt = int(node.get('bcnt'))
        t = time.localtime(int(node.get('time')))
        t = time.strftime('%Y-%m-%d %H:%M:%S', t)

        r.append('%s %s %6d %2d %s' % (t, srcmd5, rev, bcnt, versrel))

    r.insert(0, 'time                 srcmd5                              rev   bcnt vers-rel')

    return r


def get_commitlog(apiurl, prj, package, revision):
    import time, locale
    u = makeurl(apiurl, ['source', prj, package, '_history'])
    f = http_GET(u)
    root = ET.parse(f).getroot()

    r = []
    revisions = root.findall('revision')
    revisions.reverse()
    for node in revisions:
        try:
            rev = int(node.get('rev'))
            #vrev = int(node.get('vrev')) # what is the meaning of vrev?
            if revision and rev != int(revision):
                continue
        except ValueError:
            # this part should _never_ be reached but...
            return [ 'an unexpected error occurred - please file a bug' ]
        srcmd5 = node.find('srcmd5').text
        version = node.find('version').text
        user = node.find('user').text
        try:
            comment = node.find('comment').text.encode(locale.getpreferredencoding(), 'replace')
        except:
            comment = '<no message>'
        t = time.localtime(int(node.find('time').text))
        t = time.strftime('%Y-%m-%d %H:%M:%S', t)

        s = '-' * 76 + \
            '\nr%s | %s | %s | %s | %s\n' % (rev, user, t, srcmd5, version) + \
            '\n' + comment
        r.append(s)

    r.append('-' * 76)
    return r


def rebuild(apiurl, prj, package, repo, arch, code=None):
    query = []
    query.append('cmd=rebuild')
    if package:
        query.append('package=%s' % quote_plus(package))
    if repo:
        query.append('repository=%s' % quote_plus(repo))
    if arch:
        query.append('arch=%s' % quote_plus(arch))
    if code:
        query.append('code=%s' % quote_plus(code))

    u = makeurl(apiurl, ['build', prj], query=query)
    try:
        f = http_POST(u)
    except urllib2.HTTPError, e:
        print >>sys.stderr, 'could not trigger rebuild for project \'%s\' package \'%s\'' % (prj, package)
        print >>sys.stderr, u
        print >>sys.stderr, e
        sys.exit(1)

    root = ET.parse(f).getroot()
    return root.get('code')


def store_read_project(dir):
    try:
        p = open(os.path.join(dir, store, '_project')).readlines()[0].strip()
    except IOError:
        print >>sys.stderr, 'error: \'%s\' is not an osc project dir ' \
                            'or working copy' % dir
        sys.exit(1)
    return p


def store_read_package(dir):
    try:
        p = open(os.path.join(dir, store, '_package')).readlines()[0].strip()
    except IOError:
        print >>sys.stderr, 'error: \'%s\' is not an osc working copy' % dir
        sys.exit(1)
    return p


def store_read_apiurl(dir):
    fname = os.path.join(dir, store, '_apiurl')
    try:
        apiurl = open(fname).readlines()[0].strip()
    except:
        apiurl = conf.config['scheme'] + '://' + conf.config['apisrv']
        #store_write_apiurl(dir, apiurl)
    return apiurl


def store_write_project(dir, project):
    fname = os.path.join(dir, store, '_project')
    open(fname, 'w').write(project + '\n')


def store_write_apiurl(dir, apiurl):
    fname = os.path.join(dir, store, '_apiurl')
    open(fname, 'w').write(apiurl + '\n')


def store_write_initial_packages(dir, project, subelements):
    fname = os.path.join(dir, store, '_packages')
    root = ET.Element('project', name=project)
    for elem in subelements:
        root.append(elem)
    ET.ElementTree(root).write(fname)


def get_osc_version():
    return __version__


def abortbuild(apiurl, project, package=None, arch=None, repo=None):
    query = []
    query.append('cmd=abortbuild')
    if package:
        query.append('package=%s' % quote_plus(package))
    if arch:
        query.append('arch=%s' % quote_plus(arch))
    if repo:
        query.append('repository=%s' % quote_plus(repo))
    u = makeurl(apiurl, ['build', project], query)
    try:
        f = http_POST(u)
    except urllib2.HTTPError, e:
        err_str = 'abortion failed for project %s' % project
        if package:
            err_str += ' package %s' % package
        if arch:
            err_str += ' arch %s' % arch
        if repo:
            err_str += ' repo %s' % repo
        print >> sys.stderr, err_str
        print >> sys.stderr, u
        print >> sys.stderr, e
        sys.exit(1)
    root = ET.parse(f).getroot()
    return root.get('code')
|
|
|
|
|
|
|
|
|
2007-10-27 21:13:24 +02:00
|
|
|
def wipebinaries(apiurl, project, package=None, arch=None, repo=None, code=None):
    query = []
    query.append('cmd=wipe')
    if package:
        query.append('package=%s' % quote_plus(package))
    if arch:
        query.append('arch=%s' % quote_plus(arch))
    if repo:
        query.append('repository=%s' % quote_plus(repo))
    if code:
        query.append('code=%s' % quote_plus(code))

    u = makeurl(apiurl, ['build', project], query)
    try:
        f = http_POST(u)
    except urllib2.HTTPError, e:
        err_str = 'could not wipe binaries for project %s' % project
        if package:
            err_str += ' package %s' % package
        if arch:
            err_str += ' arch %s' % arch
        if repo:
            err_str += ' repository %s' % repo
        if code:
            err_str += ' code=%s' % code
        print >> sys.stderr, err_str
        print >> sys.stderr, u
        print >> sys.stderr, e
        sys.exit(1)
    root = ET.parse(f).getroot()
    return root.get('code')


def parseRevisionOption(string):
    """
    returns a tuple which contains the revisions
    (each entry may be None if nothing could be parsed)
    """
    if string:
        if ':' in string:
            splitted_rev = string.split(':')
            try:
                for i in splitted_rev:
                    int(i)
                return tuple(splitted_rev)
            except ValueError:
                print >>sys.stderr, 'your revision \'%s\' will be ignored' % string
                return None, None
        else:
            if string.isdigit():
                return string, None
            elif string.isalnum() and len(string) == 32:
                # could be an md5sum
                return string, None
            else:
                print >>sys.stderr, 'your revision \'%s\' will be ignored' % string
                return None, None
    else:
        return None, None


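# Example (illustrative): parseRevisionOption() accepts a single revision,
# a 'rev1:rev2' range, or a 32-char md5 srcmd5.
#
#   parseRevisionOption('7')      # -> ('7', None)
#   parseRevisionOption('3:7')    # -> ('3', '7')
#   parseRevisionOption('abc')    # -> (None, None), with a warning

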
def checkRevision(prj, pac, revision, apiurl=None):
    """
    check if revision is a valid revision, i.e. it is neither <= 0
    nor larger than the upstream revision id
    """
    if len(revision) == 32:
        # there isn't a way to check this kind of revision for validity
        return True
    if not apiurl:
        apiurl = conf.config['apiurl']
    try:
        if int(revision) > int(show_upstream_rev(apiurl, prj, pac)) \
           or int(revision) <= 0:
            return False
        else:
            return True
    except (ValueError, TypeError):
        return False


def build_xpath_predicate(search_list, search_term, exact_matches):
    """
    Builds and returns an xpath predicate
    """
    predicate = ['[']
    for i, elem in enumerate(search_list):
        # separate the alternatives with ' or '
        if i > 0:
            predicate.append(' or ')
        if exact_matches:
            predicate.append('%s=\'%s\'' % (elem, search_term))
        else:
            predicate.append('contains(%s, \'%s\')' % (elem, search_term))
    predicate.append(']')
    return predicate


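# Example (illustrative): the predicate is returned as a list of fragments
# which the caller joins into the xpath expression.
#
#   ''.join(build_xpath_predicate(['@name', 'title'], 'osc', False))
#   # -> "[contains(@name, 'osc') or contains(title, 'osc')]"

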
def build_table(col_num, data=None, headline=None, width=1):
    """
    This method builds a simple table.

    Example1: build_table(2, ['foo', 'bar', 'suse', 'osc'], ['col1', 'col2'], 2)
        col1  col2
        foo   bar
        suse  osc
    """
    # don't use mutable objects as default arguments - defaults are shared
    # between all calls
    data = data or []
    headline = headline or []

    longest_col = []
    for i in range(col_num):
        longest_col.append(0)
    if headline:
        data[0:0] = headline
    # find the longest entry in each column
    i = 0
    for itm in data:
        if longest_col[i] < len(itm):
            longest_col[i] = len(itm)
        if i == col_num - 1:
            i = 0
        else:
            i += 1
    # calculate the width of each column
    for i, row in enumerate(longest_col):
        longest_col[i] = row + width
    # build the rows
    row = []
    table = []
    i = 0
    for itm in data:
        if i % col_num == 0:
            if row:
                table.append(''.join(row))
            i = 0
            row = [itm.ljust(longest_col[i])]
        else:
            # there is no need to justify the entries of the last column
            if i == col_num - 1:
                row.append(itm)
            else:
                row.append(itm.ljust(longest_col[i]))
        i += 1
    table.append(''.join(row))
    return table


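# Example (illustrative): build_table() returns a list of preformatted
# rows, one string per row, ready to be printed.
#
#   for row in build_table(2, ['foo', 'bar', 'suse', 'osc'],
#                          ['col1', 'col2'], 2):
#       print row
#   # col1  col2
#   # foo   bar
#   # suse  osc

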
def search(apiurl, search_list, kind, search_term, verbose=False, exact_matches=False, repos_baseurl=False):
    """
    Perform a search for 'search_term'. A list which contains the
    results will be returned on success, otherwise 'None'. If 'verbose' is
    true and the title-tag-text (<title>TEXT</title>) is longer than 60
    chars it will be truncated.
    """
    predicate = build_xpath_predicate(search_list, search_term, exact_matches)
    u = makeurl(apiurl, ['search', kind], ['match=%s' % quote_plus(''.join(predicate))])
    f = http_GET(u)
    root = ET.parse(f).getroot()
    result = []
    for node in root.findall(kind):
        # TODO: clarify if we need to check if node.get() returns 'None'.
        # If it returns 'None' something is broken anyway...
        if kind == 'package':
            project = node.get('project')
            package = node.get('name')
            result.append(package)
        else:
            project = node.get('name')
            result.append(project)
        if verbose:
            title = node.findtext('title').strip()
            if len(title) > 60:
                title = title[:60] + '...'
            result.append(title)
        if repos_baseurl:
            result.append('http://download.opensuse.org/repositories/%s/' % project.replace(':', ':/'))
    if result:
        return result
    else:
        return None


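# Example (sketch; the apiurl is a placeholder): find all projects whose
# name or title contains 'games'. The result list is flat; with
# verbose=True every match contributes two entries (name, title), so it
# can be fed straight into build_table().
#
#   hits = search('https://api.example.org', ['@name', 'title'],
#                 'project', 'games', verbose=True)
#   if hits:
#       for row in build_table(2, hits, ['# name', '# title']):
#           print row

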
def delete_dir(dir):
    # small security checks
    if os.path.islink(dir):
        return False
    elif os.path.abspath(dir) == '/':
        return False
    elif not os.path.isdir(dir):
        return False

    for dirpath, dirnames, filenames in os.walk(dir, topdown=False):
        for filename in filenames:
            try:
                os.unlink(os.path.join(dirpath, filename))
            except OSError:
                return False
        for dirname in dirnames:
            try:
                os.rmdir(os.path.join(dirpath, dirname))
            except OSError:
                return False
    try:
        os.rmdir(dir)
    except OSError:
        return False
    return True


def delete_tmpdir(tmpdir):
    """
    This method deletes a tempdir. The tempdir
    must be located under /tmp/$DIR. If 'tmpdir' is not
    a valid tempdir, or if os.unlink() / os.rmdir() fails,
    False is returned - otherwise True.
    """
    head, tail = os.path.split(tmpdir)
    if not head.startswith('/tmp') or not tail:
        return False
    else:
        return delete_dir(tmpdir)


def delete_storedir(store_dir):
    """
    This method deletes a store dir ('.osc').
    """
    head, tail = os.path.split(store_dir)
    if tail == '.osc':
        return delete_dir(store_dir)
    else:
        return False


def unpack_srcrpm(srpm, dir, *files):
    """
    This method unpacks the passed srpm into the
    passed dir. If arguments are passed via the \'files\' tuple
    only those files will be unpacked.
    """
    if not is_srcrpm(srpm):
        print >>sys.stderr, 'error - \'%s\' is not a source rpm.' % srpm
        sys.exit(1)
    curdir = os.getcwd()
    if not os.path.isdir(dir):
        dir = curdir
    else:
        os.chdir(dir)
    # use POSIX redirection - '&>' is a bashism and is misparsed by a plain
    # /bin/sh as invoked by os.system()
    cmd = 'rpm2cpio %s | cpio -i %s > /dev/null 2>&1' % (srpm, ' '.join(files))
    ret = os.system(cmd)
    if ret != 0:
        print >>sys.stderr, 'error \'%s\' - cannot extract \'%s\'' % (ret, srpm)
        sys.exit(1)
    os.chdir(curdir)


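# Example (sketch; the paths are placeholders and rpm2cpio/cpio must be
# installed): extract only the spec file from a source rpm into /tmp/work.
#
#   unpack_srcrpm('/tmp/foo-1.0-1.src.rpm', '/tmp/work', '*.spec')

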
def tag_to_rpmpy(tag):
    """
    maps a spec file tag/section to a valid
    rpm-python RPMTAG
    """
    try:
        import rpm
        tags = { 'Name:' : rpm.RPMTAG_NAME,
                 'Summary:' : rpm.RPMTAG_SUMMARY,
                 '%description' : rpm.RPMTAG_DESCRIPTION
               }
        if tag in tags:
            return tags[tag]
        else:
            return None
    except ImportError:
        return None


def data_from_rpm(rpm_file, *rpmdata):
    """
    This method reads the given rpmdata
    from a rpm.
    """
    try:
        import rpm
        ts = rpm.TransactionSet()
        # don't shadow the 'file' builtin
        f = open(rpm_file, 'r')
        header = ts.hdrFromFdno(f.fileno())
        f.close()
        data = {}
        for itm in rpmdata:
            rpmpy = tag_to_rpmpy(itm)
            if rpmpy:
                data[itm] = header[rpmpy]
            else:
                print >>sys.stderr, 'invalid data \'%s\'' % itm
                sys.exit(1)
        return data
    except ImportError:
        print >>sys.stderr, 'warning: rpm-python not found'
        return None


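# Example (sketch; the path is a placeholder and rpm-python must be
# installed): read name and summary from a package header.
#
#   data = data_from_rpm('/tmp/foo-1.0-1.src.rpm', 'Name:', 'Summary:')
#   if data:
#       print data['Name:'], '-', data['Summary:']

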
def is_rpm(f):
    """check if the named file is an RPM package"""
    try:
        h = open(f, 'rb').read(4)
    except IOError:
        return False

    # the rpm lead starts with the magic bytes 0xed 0xab 0xee 0xdb
    if h == '\xed\xab\xee\xdb':
        return True
    else:
        return False


def is_srcrpm(f):
    """check if the named file is a source RPM"""
    if not is_rpm(f):
        return False

    try:
        h = open(f, 'rb').read(8)
    except IOError:
        return False

    # the low byte of the rpm lead's type field is 1 for a source rpm
    if h[7] == '\x01':
        return True
    else:
        return False


def delete_server_files(apiurl, prj, pac, files):
    """
    This method deletes the given filelist on the
    server. No local data will be touched.
    """
    for file in files:
        try:
            u = makeurl(apiurl, ['source', prj, pac, file])
            http_DELETE(u)
        except:
            # we do not handle all exceptions here - we need another solution
            # see bug #280034
            print >>sys.stderr, 'error while deleting file \'%s\'' % file
            sys.exit(1)


def addMaintainer(apiurl, prj, pac, user):
    """ add a new maintainer to a package or project """
    path = quote_plus(prj),
    kind = 'prj'
    if pac:
        path = path + (quote_plus(pac),)
        kind = 'pkg'
    data = meta_exists(metatype=kind,
                       path_args=path,
                       template_args=None,
                       create_new=False)

    if data and get_user_meta(apiurl, user) != None:
        tree = ET.fromstring(''.join(data))
        found = False
        for person in tree.getiterator('person'):
            if person.get('userid') == user:
                found = True
                print "user already exists"
                break
        if not found:
            # the xml has a fixed structure
            tree.insert(2, ET.Element('person', role='maintainer', userid=user))
            print 'user \'%s\' added to \'%s\'' % (user, pac or prj)
            edit_meta(metatype=kind,
                      path_args=path,
                      data=ET.tostring(tree))
    else:
        print "osc: an error occurred"


def delMaintainer(apiurl, prj, pac, user):
    """ delete a maintainer from a package or project """
    path = quote_plus(prj),
    kind = 'prj'
    if pac:
        path = path + (quote_plus(pac), )
        kind = 'pkg'
    data = meta_exists(metatype=kind,
                       path_args=path,
                       template_args=None,
                       create_new=False)
    if data:
        tree = ET.fromstring(''.join(data))
        found = False
        for person in tree.getiterator('person'):
            if person.get('userid') == user:
                tree.remove(person)
                found = True
                print "user \'%s\' removed" % user
        if found:
            edit_meta(metatype=kind,
                      path_args=path,
                      data=ET.tostring(tree))
        else:
            print "user \'%s\' not found in \'%s\'" % (user, pac or prj)
    else:
        print "osc: an error occurred"


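# Example (sketch; the apiurl, project, package and user names are
# placeholders): give 'alice' maintainer rights on a package, then revoke
# them again. Both helpers round-trip the package meta through edit_meta().
#
#   addMaintainer('https://api.example.org', 'home:user', 'foo', 'alice')
#   delMaintainer('https://api.example.org', 'home:user', 'foo', 'alice')

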
def createPackageDir(pathname, prj_obj=None):
    """
    create and initialize a new package dir in the given project.
    prj_obj can be a Project() instance.
    """
    prj_dir, pac_dir = getPrjPacPaths(pathname)
    if is_project_dir(prj_dir):
        if not os.path.exists(pac_dir):
            prj = prj_obj or Project(prj_dir, False)
            if prj.addPackage(pac_dir):
                os.mkdir(pathname)
                os.chdir(pathname)
                init_package_dir(prj.apiurl,
                                 prj.name,
                                 pac_dir, pac_dir, files=False)
                os.chdir(prj.absdir)
                print statfrmt('A', os.path.normpath(pathname))
                return True
            else:
                return False
        else:
            print '\'%s\' already exists' % pathname
            return False
    else:
        print '\'%s\' is not a working copy' % prj_dir
        return False


def addFiles(filenames):
    for filename in filenames:
        if not os.path.exists(filename):
            print >>sys.stderr, "file '%s' does not exist" % filename
            return 1

    # init a package dir if we have a normal dir in the "filenames"-list
    # so that it will be found by findpacs() later
    for filename in filenames:
        prj_dir, pac_dir = getPrjPacPaths(filename)
        if not is_package_dir(filename) and os.path.isdir(filename) and is_project_dir(prj_dir) \
           and conf.config['do_package_tracking']:
            old_dir = os.getcwd()
            prj_name = store_read_project(prj_dir)
            prj_apiurl = store_read_apiurl(prj_dir)
            os.chdir(filename)
            init_package_dir(prj_apiurl, prj_name, pac_dir, pac_dir, files=False)
            os.chdir(old_dir)
        elif is_package_dir(filename) and conf.config['do_package_tracking']:
            print 'osc: warning: \'%s\' is already under version control' % filename
            sys.exit(1)

    pacs = findpacs(filenames)

    for pac in pacs:
        if conf.config['do_package_tracking'] and not pac.todo:
            prj = Project(os.path.dirname(pac.absdir))
            if pac.name in prj.pacs_unvers:
                prj.addPackage(pac.name)
                print statfrmt('A', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
                for filename in pac.filenamelist_unvers:
                    pac.todo.append(filename)
            elif pac.name in prj.pacs_have:
                print 'osc: warning: \'%s\' is already under version control' % pac.name
        for filename in pac.todo:
            if filename in pac.excluded:
                continue
            if filename in pac.filenamelist:
                print >>sys.stderr, 'osc: warning: \'%s\' is already under version control' % filename
                continue
            if pac.dir != '.':
                pathname = os.path.join(pac.dir, filename)
            else:
                pathname = filename
            print statfrmt('A', pathname)
            pac.addfile(filename)


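# Example (sketch; run from inside a checked-out package working copy):
# schedule two files for addition; they show up as 'A' in 'osc status'
# until committed.
#
#   addFiles(['foo.spec', 'foo-1.0.tar.gz'])

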
def getPrjPacPaths(path):
    """
    returns the path for a project and a package
    from path. This is needed if you try to add
    or delete packages:
    Examples:
        osc add pac1/:          prj_dir = CWD;
                                pac_dir = pac1
        osc add /path/to/pac1:  prj_dir = /path/to;
                                pac_dir = pac1
        osc add /path/to/pac1/file
                                => this would be an invalid path;
                                   the caller has to validate the returned
                                   path!
    """
    # make sure we have a dir: osc add bar vs. osc add bar/; osc add /path/to/prj_dir/new_pack
    # filename = os.path.join(tail, '')
    prj_dir, pac_dir = os.path.split(os.path.normpath(path))
    if prj_dir == '':
        prj_dir = os.getcwd()
    return (prj_dir, pac_dir)


def getTransActPath(pac_dir):
    """
    returns the path for the commit and update operations/transactions.
    Normally the "dir" attribute of a Package() object will be passed to
    this method.
    """
    if pac_dir != '.':
        pathn = os.path.normpath(pac_dir)
    else:
        pathn = ''
    return pathn