#!/usr/bin/python

# Copyright (C) 2006 Peter Poeml. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.

__version__ = '0.2'

import os
import sys
import urllib2
import netrc
from urlparse import urlunsplit
import cElementTree as ET
from cStringIO import StringIO

# the needed entry in .netrc looks like this:
# machine api.opensuse.org login your_login password your_pass
info = netrc.netrc()
username, account, password = info.authenticators("api.opensuse.org")

from xml.dom.ext.reader import Sax2
from xml.dom.ext import PrettyPrint

netloc = 'api.opensuse.org'
scheme = 'http'

BUFSIZE = 1024*1024
store = '.osc'
exclude_stuff = [store, '.svn', 'CVS']


def parseargs():
    if len(sys.argv) > 2:
        args = sys.argv[2:]
    else:
        args = [ os.curdir ]
    return args


def makeurl(l):
    """given a list of path components, construct a complete URL"""
    return urlunsplit((scheme, netloc, '/'.join(l), '', ''))
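
# A quick sketch of what this helper yields (the project/package names here
# are made up for illustration); with the scheme/netloc defaults above:
#
#   makeurl(['source', 'Apache', 'apache2'])
#   # -> 'http://api.opensuse.org/source/Apache/apache2'

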
def copy_file(src, dst):
    # read and write in binary mode, so non-text files survive the copy
    s = open(src, 'rb')
    d = open(dst, 'wb')
    while 1:
        buf = s.read(BUFSIZE)
        if not buf: break
        d.write(buf)
    s.close()
    d.close()


def init_basicauth():

    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    # this creates a password manager
    passmgr.add_password(None, netloc, username, password)
    # because we have put None at the start it will always
    # use this username/password combination for urls
    # for which `netloc` is a super-url

    authhandler = urllib2.HTTPBasicAuthHandler(passmgr)
    # create the AuthHandler

    opener = urllib2.build_opener(authhandler)

    urllib2.install_opener(opener)
    # All calls to urllib2.urlopen will now use our handler
    # Make sure not to include the protocol in with the URL, or
    # HTTPPasswordMgrWithDefaultRealm will be very confused.
    # You must (of course) use it when fetching the page though.
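
# Illustrative usage sketch: once init_basicauth() has installed the opener,
# every urllib2.urlopen() call in this module authenticates transparently:
#
#   init_basicauth()
#   f = urllib2.urlopen(makeurl(['source']))   # fetch the project listing

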
def init_package_dir(project, package, dir):
    if not os.path.isdir(store):
        os.mkdir(store)
    os.chdir(store)
    f = open('_project', 'w')
    f.write(project + '\n')
    f.close()
    f = open('_package', 'w')
    f.write(package + '\n')
    f.close()

    f = open('_files', 'w')
    f.write(''.join(show_files_meta(project, package)))
    f.close()

    f = open('_osclib_version', 'w')
    f.write(__version__ + '\n')
    f.close()

    return


def check_store_version():
    try:
        v = open(os.path.join(store, '_osclib_version')).read().strip()
    except IOError:
        # no version file -- the working copy predates versioned metadata
        v = ''

    if v != __version__:
        print
        print 'the osc metadata of your working copy'
        print ' %s' % os.getcwd()
        print 'has the wrong version (%s), should be %s' % (v, __version__)
        print 'please do a fresh checkout'
        print
        sys.exit(1)


def meta_get_packagelist(prj):

    u = makeurl(['source', prj, '_meta'])
    f = urllib2.urlopen(u)

    tree = ET.parse(f)
    root = tree.getroot()

    r = []
    for node in root.findall('package'):
        r.append(node.get('name'))
    return r
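
# Sketch of the data this parses (structure inferred from the findall()
# above; element and attribute details beyond that are assumptions): the
# project _meta document carries one <package> element per package, e.g.
#
#   <project name="Apache">
#     <package name="apache2"/>
#     ...
#   </project>

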
def meta_get_filelist(prj, package):

    u = makeurl(['source', prj, package])
    f = urllib2.urlopen(u)
    tree = ET.parse(f)

    r = []
    for node in tree.getroot():
        r.append(node.get('name'))
    return r


def localmeta_addfile(filename):

    if filename in localmeta_get_filelist():
        return

    reader = Sax2.Reader()
    f = open(os.path.join(store, '_files')).read()
    doc = reader.fromString(f)

    new = doc.createElement('entry')
    #new.setAttribute('filetype', 'source')
    new.setAttribute('name', filename)
    doc.documentElement.appendChild(new)

    o = open(os.path.join(store, '_files'), 'w')
    PrettyPrint(doc, stream=o)
    o.close()


def localmeta_removefile(filename):

    reader = Sax2.Reader()
    f = open(os.path.join(store, '_files')).read()
    doc = reader.fromString(f)

    for i in doc.getElementsByTagName('entry'):
        if i.getAttribute('name') == filename:
            i.parentNode.removeChild(i)

    o = open(os.path.join(store, '_files'), 'w')
    PrettyPrint(doc, stream=o)
    o.close()


def localmeta_get_filelist():

    tree = ET.parse(os.path.join(store, '_files'))
    root = tree.getroot()

    r = []
    for node in root.findall('entry'):
        r.append(node.get('name'))
    return r
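
# The _files metadata edited by the three localmeta_* helpers above is the
# server's directory listing as fetched by show_files_meta(). Roughly (a
# sketch with made-up filenames; the server may send further attributes):
#
#   <directory name="apache2">
#     <entry name="apache2.spec"/>
#     <entry name="apache2.changes"/>
#   </directory>

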
def get_slash_source():
    u = makeurl(['source'])
    tree = ET.parse(urllib2.urlopen(u))

    r = []
    for node in tree.getroot():
        r.append(node.get('name'))
    r.sort()
    return r


def show_project_meta(prj):
    f = urllib2.urlopen(makeurl(['source', prj, '_meta']))
    return f.readlines()


def show_package_meta(prj, pac):
    f = urllib2.urlopen(makeurl(['source', prj, pac, '_meta']))
    return f.readlines()


def show_files_meta(prj, pac):
    f = urllib2.urlopen(makeurl(['source', prj, pac]))
    return f.readlines()


def get_user_id(user):
    u = makeurl(['person', user])
    f = urllib2.urlopen(u)
    return f.readlines()


def get_source_file(prj, package, filename):
    u = makeurl(['source', prj, package, filename])
    #print 'checking out', u
    f = urllib2.urlopen(u)

    # write in binary mode, so non-text source files arrive intact
    o = open(filename, 'wb')
    while 1:
        buf = f.read(BUFSIZE)
        if not buf: break
        o.write(buf)
    o.close()


def dgst(file):

    if not os.path.exists(file):
        return None

    import sha
    s = sha.new()
    # hash in binary mode, chunk by chunk, so large files are no problem
    f = open(file, 'rb')
    while 1:
        buf = f.read(BUFSIZE)
        if not buf: break
        s.update(buf)
    f.close()
    return s.digest()
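
# Note: dgst() returns the raw binary SHA-1 digest, not a hex string; within
# this module it is only ever compared for equality (see get_file_status()),
# so that is sufficient. A printable form, if ever needed, would come from
# the sha object's hexdigest() method instead.

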
def get_file_status(prj, package, filename, filelist=None):
    """
    status can be:

     file   storefile  file present  STATUS
    exists   exists     in _files

      x        x           -         'D'
      x        x           x         'M', if digest differs, else ' '
      x        -           -         '?'
      x        -           x         'A'
      -        x           x         '!'
      -        x           -         'D' (when file in working copy is already deleted)
      -        -           x         NOT DEFINED
      -        -           -         NEVER REACHED
    """

    known_by_meta = False
    exists = False
    exists_in_store = False

    if not filelist:
        filelist = localmeta_get_filelist()

    if filename in filelist:
        known_by_meta = True

    if os.path.exists(filename):
        exists = True

    if os.path.exists(os.path.join(store, filename)):
        exists_in_store = True

    if exists and exists_in_store and not known_by_meta:
        state = 'D'
    elif exists and exists_in_store and known_by_meta:
        if dgst(filename) != dgst(os.path.join(store, filename)):
            state = 'M'
        else:
            state = ' '
    elif exists and not exists_in_store and not known_by_meta:
        state = '?'
    elif exists and not exists_in_store and known_by_meta:
        state = 'A'
    elif not exists and exists_in_store and known_by_meta:
        state = '!'
    elif not exists and not exists_in_store and known_by_meta:
        print '%s: not exists and not exists_in_store and known_by_meta' % filename
        print 'this state is undefined!'
        sys.exit(1)
    elif not exists and exists_in_store and not known_by_meta:
        state = 'D'
    elif not exists and not exists_in_store and not known_by_meta:
        print '%s: not exists and not exists_in_store and not known_by_meta' % filename
        print 'this code path should never be reached!'
        sys.exit(1)

    return '%s %s' % (state, filename)
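
# Illustrative example (filename made up): for a locally edited foo.spec that
# is listed in _files and has a pristine copy under .osc/, the two digests
# differ, so this returns 'M foo.spec'.

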
def get_source_file_diff(prj, package, filename):
    url = makeurl(['source', prj, package, filename])
    f = urllib2.urlopen(url)

    localfile = open(filename, 'r')

    import difflib
    #print url
    d = difflib.unified_diff(f.readlines(), localfile.readlines(), fromfile = url, tofile = filename)

    localfile.close()

    return ''.join(d)


def put_source_file(prj, package, filename):
    import othermethods

    sys.stdout.write('.')
    u = makeurl(['source', prj, package, os.path.basename(filename)])
    othermethods.putfile(u, filename, username, password)


def del_source_file(prj, package, filename):
    import othermethods

    u = makeurl(['source', prj, package, filename])
    othermethods.delfile(u, filename, username, password)

    wcfilename = os.path.join(store, filename)
    if os.path.exists(filename): os.unlink(filename)
    if os.path.exists(wcfilename): os.unlink(wcfilename)


def make_dir(project, package):
    #print "creating directory '%s'" % project
    print 'A %s' % project
    if not os.path.exists(project):
        os.mkdir(project)
        os.mkdir(os.path.join(project, store))

    #print "creating directory '%s/%s'" % (project, package)
    print 'A %s/%s' % (project, package)
    if not os.path.exists(os.path.join(project, package)):
        os.mkdir(os.path.join(project, package))
        os.mkdir(os.path.join(project, package, store))

    return(os.path.join(project, package))


def checkout_package(project, package):
    olddir = os.getcwd()

    os.chdir(make_dir(project, package))
    for filename in meta_get_filelist(project, package):
        get_source_file(project, package, filename)
        copy_file(filename, os.path.join(store, filename))
        print 'A ', os.path.join(project, package, filename)

    init_package_dir(project, package, store)

    os.chdir(olddir)
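
# Illustrative run (names made up): checkout_package('Apache', 'apache2')
# creates Apache/apache2/ below the current directory, downloads every file
# listed on the server, keeps a pristine copy of each under .osc/, and
# writes the _project/_package/_files/_osclib_version metadata there.

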
def get_platforms():
    f = urllib2.urlopen(makeurl(['platform']))
    tree = ET.parse(f)
    r = []
    for node in tree.getroot():
        r.append(node.get('name'))
    r.sort()
    return r


def get_platforms_of_project(prj):
    f = show_project_meta(prj)
    tree = ET.parse(StringIO(''.join(f)))

    r = []
    for node in tree.findall('repository'):
        r.append(node.get('name'))
    return r


def show_results_meta(prj, package, platform):
    u = makeurl(['result', prj, platform, package, 'result'])
    f = urllib2.urlopen(u)
    return f.readlines()


def get_results(prj, package, platform):
    #print '----------------------------------------'

    r = []
    #result_line_templ = '%(prj)-15s %(pac)-15s %(rep)-15s %(arch)-10s %(status)s'
    result_line_templ = '%(rep)-15s %(arch)-10s %(status)s'

    f = show_results_meta(prj, package, platform)
    tree = ET.parse(StringIO(''.join(f)))

    root = tree.getroot()

    rmap = {}
    rmap['prj'] = root.get('project')
    rmap['pac'] = root.get('package')
    rmap['rep'] = root.get('repository')

    for node in root.findall('archresult'):
        rmap['arch'] = node.get('arch')

        statusnode = node.find('status')
        rmap['status'] = statusnode.get('code')

        if rmap['status'] == 'expansion error':
            rmap['status'] += ': ' + statusnode.find('summary').text

        if rmap['status'] == 'failed':
            rmap['status'] += ': %s://%s' % (scheme, netloc) + \
                '/result/%(prj)s/%(rep)s/%(pac)s/%(arch)s/log' % rmap

        r.append(result_line_templ % rmap)

    return r
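
# A line produced by the template above looks roughly like this (repository
# and architecture names made up, log URL truncated):
#
#   standard        i586       failed: http://api.opensuse.org/result/.../log

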
def get_log(prj, package, platform, arch):
    u = makeurl(['result', prj, platform, package, arch, 'log'])
    f = urllib2.urlopen(u)
    return f.readlines()


def store_read_project(dir):
    p = open(os.path.join(dir, store, '_project')).readlines()[0].strip()
    return p


def store_read_package(dir):
    p = open(os.path.join(dir, store, '_package')).readlines()[0].strip()
    return p