- add an 'osc repairlink' command
- pass the revision correctly so that 'osc co' can check out an old revision again
commit 9dbfeeb2ef (parent b8642c4ee0)
@@ -1534,7 +1534,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             if len(pacs) > 1:
                 print 'Updating %s' % p.name
 
-            if opts.expand_link and p.haslinkerror():
+            if opts.expand_link and p.haslinkerror() and not p.islinkrepair():
                 raise oscerr.LinkExpandError(p.linkerror())
 
             if not rev:
@@ -1545,8 +1545,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                 print 'Unexpanding to rev', p.linkinfo.lsrcmd5
                 rev = p.linkinfo.lsrcmd5
             elif p.islink() and p.isexpanded():
-                rev = show_upstream_xsrcmd5(p.apiurl,
-                                            p.prjname, p.name)
+                rev = p.latest_rev()
 
             # FIXME: ugly workaround for #399247
             if opts.expand_link or opts.unexpand_link:
@@ -2750,6 +2749,175 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         for data in streamfile(u):
             sys.stdout.write(data)
 
 
+    # helper function to download a file from a specific revision
+    def download(self, name, md5, dir, destfile):
+        o = open(destfile, 'w')
+        if md5 != '':
+            query = {'rev': dir['srcmd5']}
+            u = makeurl(dir['apiurl'], ['source', dir['project'], dir['package'], pathname2url(name)], query=query)
+            for buf in streamfile(u, http_GET, BUFSIZE):
+                o.write(buf)
+        o.close
+
+    @cmdln.option('-d', '--destdir', default='repairlink', metavar='DIR',
+                  help='destination directory')
+    @cmdln.option('-p', '--project', help='project with broken package')
+    @cmdln.option('', '--package', help='package with broken link')
+    def do_repairlink(self, subcmd, opts):
+        """${cmd_name}: Repair a broken source link
+
+        ${cmd_usage}
+        ${cmd_option_list}
+        """
+
+        apiurl = conf.config['apiurl']
+        prj = None
+        package = None
+        if is_package_dir(os.getcwd()):
+            apiurl = store_read_apiurl(os.getcwd())
+            prj = store_read_project(os.getcwd())
+            package = store_read_package(os.getcwd())
+        if opts.project:
+            prj = opts.project
+        if opts.package:
+            package = opts.package
+        if prj == None:
+            raise oscerr.WrongArgs('please specify a project')
+        if package == None:
+            raise oscerr.WrongArgs('please specify a package')
+
+        query = { 'lastworking': 1 }
+        u = makeurl(apiurl, ['source', prj, package], query=query)
+        f = http_GET(u)
+        root = ET.parse(f).getroot()
+        linkinfo = root.find('linkinfo')
+        if linkinfo == None:
+            raise oscerr.APIError('package is not a source link')
+        if linkinfo.get('error') == None:
+            raise oscerr.APIError('source link is not broken')
+        lastworkingrev = linkinfo.get('lastworking')
+        if lastworkingrev == None:
+            raise oscerr.APIError('source link never worked')
+
+        query = { 'expand': 1, 'emptylink': 1 }
+        u = makeurl(apiurl, ['source', prj, package], query=query)
+        f = http_GET(u)
+        meta = f.readlines()
+        root_new = ET.parse(StringIO(''.join(meta))).getroot()
+        dir_new = {'apiurl': apiurl}
+        dir_new['srcmd5'] = root_new.get('srcmd5')
+        dir_new['entries'] = map(lambda e: [e.get('name'), e.get('md5')], root_new.findall('entry'))
+        dir_new['project'] = linkinfo.get('project')
+        dir_new['package'] = linkinfo.get('package')
+
+        query = { 'rev': lastworkingrev }
+        u = makeurl(apiurl, ['source', prj, package], query=query)
+        f = http_GET(u)
+        root_oldpatched = ET.parse(f).getroot()
+        linkinfo_oldpatched = root_oldpatched.find('linkinfo')
+        if linkinfo_oldpatched == None:
+            raise oscerr.APIError('lastworking is not a source link?')
+        if linkinfo_oldpatched.get('error') != None:
+            raise oscerr.APIError('lastworking is not working?')
+        dir_oldpatched = {'apiurl': apiurl}
+        dir_oldpatched['srcmd5'] = root_oldpatched.get('srcmd5')
+        dir_oldpatched['entries'] = map(lambda e: [e.get('name'), e.get('md5')], root_oldpatched.findall('entry'))
+        dir_oldpatched['project'] = prj
+        dir_oldpatched['package'] = package
+
+        query = {}
+        query['rev'] = linkinfo_oldpatched.get('srcmd5')
+        u = makeurl(apiurl, ['source', linkinfo_oldpatched.get('project'), linkinfo_oldpatched.get('package')], query=query)
+        f = http_GET(u)
+        root_old = ET.parse(f).getroot()
+        dir_old = {'apiurl': apiurl}
+        dir_old['srcmd5'] = root_old.get('srcmd5')
+        dir_old['entries'] = map(lambda e: [e.get('name'), e.get('md5')], root_old.findall('entry'))
+        dir_old['project'] = linkinfo_oldpatched.get('project')
+        dir_old['package'] = linkinfo_oldpatched.get('package')
+
+        entries_old = dict(dir_old['entries'])
+        entries_oldpatched = dict(dir_oldpatched['entries'])
+        entries_new = dict(dir_new['entries'])
+
+        entries = {}
+        entries.update(entries_old)
+        entries.update(entries_oldpatched)
+        entries.update(entries_new)
+
+        destdir = opts.destdir
+        if os.path.isdir(destdir):
+            shutil.rmtree(destdir)
+        os.mkdir(destdir)
+
+        olddir = os.getcwd()
+        os.chdir(destdir)
+        init_package_dir(apiurl, prj, package, destdir, files=False)
+        os.chdir(olddir)
+        store_write_string(destdir, '_files', ''.join(meta));
+        store_write_string(destdir, '_linkrepair', '');
+
+        storedir = os.path.join(destdir, store)
+
+        conflictlist = []
+        deletelist = []
+        for name in sorted(entries.keys()):
+            md5_old = entries_old.get(name, '')
+            md5_new = entries_new.get(name, '')
+            md5_oldpatched = entries_oldpatched.get(name, '')
+
+            if md5_new != '':
+                self.download(name, md5_new, dir_new, os.path.join(storedir, name))
+
+            if md5_old == md5_new:
+                if md5_oldpatched == '':
+                    deletelist.append(name)
+                    continue
+                print " " + name
+                self.download(name, md5_oldpatched, dir_oldpatched, os.path.join(destdir, name))
+                continue
+
+            if md5_old == md5_oldpatched:
+                if md5_new == '':
+                    continue
+                print " " + name
+                shutil.copy2(os.path.join(storedir, name), os.path.join(destdir, name))
+                continue
+
+            if md5_new == md5_oldpatched:
+                if md5_new == '':
+                    continue
+                print "G " + name
+                shutil.copy2(os.path.join(storedir, name), os.path.join(destdir, name))
+                continue
+
+            self.download(name, md5_oldpatched, dir_oldpatched, os.path.join(destdir, name + '.mine'))
+            if md5_new != '':
+                shutil.copy2(os.path.join(storedir, name), os.path.join(destdir, name + '.new'))
+            else:
+                self.download(name, md5_new, dir_new, os.path.join(destdir, name + '.new'))
+            self.download(name, md5_old, dir_old, os.path.join(destdir, name + '.old'))
+
+            o = open(os.path.join(destdir, name), 'w')
+            code = subprocess.call(['diff3', '-m',
+                                    '-L', '.mine',
+                                    os.path.join(destdir, name + '.mine'),
+                                    '-L', '.old',
+                                    os.path.join(destdir, name + '.old'),
+                                    '-L', '.new',
+                                    os.path.join(destdir, name + '.new'),
+                                    ], stdout=o)
+            if code == 0:
+                print "M " + name
+            elif code == 1:
+                print "C " + name
+                conflictlist.append(name)
+            else:
+                print "? " + name
+                conflictlist.append(name)
+
+        if len(conflictlist) != 0:
+            store_write_string(destdir, '_in_conflict', '\n'.join(conflictlist))
+
+        if len(deletelist) != 0:
+            store_write_string(destdir, '_to_be_deleted', '\n'.join(deletelist))
+
+
     @cmdln.option('-m', '--message',
                   help='Change message')
     def do_vc(self, subcmd, opts, *args):
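The loop above does a per-file three-way comparison by md5 between three trees: the upstream sources that the last working link revision was based on ("old"), the expanded sources of that last working revision, i.e. upstream plus the local patches ("oldpatched"), and the current upstream sources ("new"). Files that only one side changed are taken over directly; only when all three differ is diff3 run, leaving name.mine, name.old and name.new next to the merge result and recording conflicts in _in_conflict. The stand-alone sketch below restates that decision table; the function and the returned action names are illustrative and not part of osc.

    # Stand-alone sketch of the per-file decision table used by do_repairlink
    # above; '' means "file not present in that tree".  Names are illustrative.
    def merge_action(md5_old, md5_oldpatched, md5_new):
        if md5_old == md5_new:
            if md5_oldpatched == '':
                return 'delete'           # the local patches removed the file
            return 'keep-patched'         # upstream unchanged, keep the patched file
        if md5_old == md5_oldpatched:
            if md5_new == '':
                return 'skip'             # upstream removed a file we never touched
            return 'take-upstream'        # fast-forward to the new upstream file
        if md5_new == md5_oldpatched:
            if md5_new == '':
                return 'skip'
            return 'already-merged'       # both sides agree ("G" in the output above)
        return 'run-diff3'                # all three differ; may end up in _in_conflict

    # tiny usage example with made-up md5 values
    print merge_action('a', 'a', 'b')     # take-upstream
    print merge_action('a', 'b', 'a')     # keep-patched
    print merge_action('a', 'b', 'c')     # run-diff3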
|
osc/core.py (85 changed lines)
@@ -446,14 +446,13 @@ class Project:
                 p = Package(os.path.join(self.dir, pac))
                 rev = None
                 if expand_link and p.islink() and not p.isexpanded():
-                    print 'Expanding to rev', p.linkinfo.xsrcmd5
                     rev = p.linkinfo.xsrcmd5
+                    print 'Expanding to rev', rev
                 elif unexpand_link and p.islink() and p.isexpanded():
-                    print 'Unexpanding to rev', p.linkinfo.lsrcmd5
                     rev = p.linkinfo.lsrcmd5
+                    print 'Unexpanding to rev', rev
                 elif p.islink() and p.isexpanded():
-                    rev = show_upstream_xsrcmd5(p.apiurl,
-                                                p.prjname, p.name)
+                    rev = p.latest_rev();
                 print 'Updating %s' % p.name
                 p.update(rev)
             elif state == 'D':
@@ -666,6 +665,9 @@ class Package:
         filename = os.path.join(self.dir, n)
         storefilename = os.path.join(self.storedir, n)
         myfilename = os.path.join(self.dir, n + '.mine')
-        upfilename = os.path.join(self.dir, n + '.r' + self.rev)
+        if self.islinkrepair():
+            upfilename = os.path.join(self.dir, n + '.new')
+        else:
+            upfilename = os.path.join(self.dir, n + '.r' + self.rev)
 
         try:
@@ -673,6 +675,8 @@ class Package:
             # the working copy may be updated, so the .r* ending may be obsolete...
             # then we don't care
             os.unlink(upfilename)
+            if self.islinkrepair():
+                os.unlink(os.path.join(self.dir, n + '.old'))
         except:
             pass
 
@@ -716,10 +720,7 @@ class Package:
 
     def commit(self, msg=''):
         # commit only if the upstream revision is the same as the working copy's
-        if self.islink() and self.isexpanded():
-            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
-        else:
-            upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
+        upstream_rev = self.latest_rev();
         if self.rev != upstream_rev:
             raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
 
@@ -743,8 +744,9 @@ class Package:
 
         if self.islink() and self.isexpanded():
             # resolve the link into the upload revision
-            u = makeurl(self.apiurl, ['source', self.prjname, self.name],
-                        query='cmd=copy&rev=upload&expand=1')
+            # XXX: do this always?
+            query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
+            u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
             f = http_POST(u)
 
         print 'Transmitting file data ',
@@ -765,16 +767,32 @@ class Package:
                       'comment': msg }
         if self.islink() and self.isexpanded():
             query['keeplink'] = '1'
+        if self.islinkrepair():
+            query['repairlink'] = '1'
         u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
-        f = http_POST(u)
+        try:
+            f = http_POST(u)
+        except urllib2.HTTPError, e:
+            e.osc_msg = 'commit failed'
+            # delete upload revision
+            try:
+                query = { 'cmd': 'deleteuploadrev' }
+                u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
+                f = http_POST(u)
+            except:
+                pass
+            raise e
         root = ET.parse(f).getroot()
         self.rev = int(root.get('rev'))
         print
         print 'Committed revision %s.' % self.rev
 
+        if self.islinkrepair():
+            os.unlink(os.path.join(self.storedir, '_linkrepair'))
+            self.linkrepair = False
+            # XXX: mark package as invalid?
         if self.islink() and self.isexpanded():
-            self.update_local_filesmeta(revision=show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name))
+            self.update_local_filesmeta(revision=self.latest_rev())
         else:
             self.update_local_filesmeta()
         self.update_datastructs()
@@ -853,7 +871,7 @@ class Package:
         Update the local _files file in the store.
         It is replaced with the version pulled from upstream.
         """
-        meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision))
+        meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
         f = open(os.path.join(self.storedir, '_files'), 'w')
         f.write(meta)
         f.close()
@@ -890,6 +908,7 @@ class Package:
 
         self.to_be_deleted = read_tobedeleted(self.dir)
         self.in_conflict = read_inconflict(self.dir)
+        self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
 
         # gather unversioned files, but ignore some stuff
         self.excluded = [ i for i in os.listdir(self.dir)
@@ -910,6 +929,10 @@ class Package:
         Returns True if the package is expanded, otherwise False."""
         return self.linkinfo.isexpanded()
 
+    def islinkrepair(self):
+        """tells us if we are repairing a broken source link."""
+        return self.linkrepair
+
     def haslinkerror(self):
         """
         Returns True if the link is broken otherwise False.
@@ -1117,6 +1140,15 @@ rev: %s
 
         os.unlink(filename)
 
+    def latest_rev(self):
+        if self.islinkrepair():
+            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
+        elif self.islink() and self.isexpanded():
+            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
+        else:
+            upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
+        return upstream_rev
+
     def update(self, rev = None):
         # save filelist and (modified) status before replacing the meta file
         saved_filenames = self.filenamelist
@@ -1817,21 +1849,27 @@ def edit_meta(metatype,
     f.sync()
 
 
-def show_files_meta(apiurl, prj, pac, revision=None):
+def show_files_meta(apiurl, prj, pac, expand=False, revision=None, linkrev=None, linkrepair=False):
     query = None
     if revision:
         query = { 'rev': revision }
+    if linkrev:
+        query = { 'linkrev': linkrev }
+    if expand:
+        query = { 'expand': 1 }
+    if linkrepair:
+        query = { 'emptylink': 1 }
     f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
     return f.readlines()
 
 
-def show_upstream_srcmd5(apiurl, prj, pac):
-    m = show_files_meta(apiurl, prj, pac)
+def show_upstream_srcmd5(apiurl, prj, pac, expand=False):
+    m = show_files_meta(apiurl, prj, pac, expand=expand)
     return ET.parse(StringIO(''.join(m))).getroot().get('srcmd5')
 
 
-def show_upstream_xsrcmd5(apiurl, prj, pac):
-    m = show_files_meta(apiurl, prj, pac)
+def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
+    m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
     try:
         # only source link packages have a <linkinfo> element.
         li_node = ET.parse(StringIO(''.join(m))).getroot().find('linkinfo')
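Note how the new keyword arguments of show_files_meta() map to the HTTP query: each later flag replaces the query dict rather than being merged into it, so for example expand together with a revision yields an expand-only request. A minimal stand-alone sketch of that precedence (the helper name is made up; the keys mirror the diff above):

    # Sketch of the query precedence in the new show_files_meta() signature;
    # build_query() is an illustrative name, not part of osc.
    def build_query(revision=None, linkrev=None, expand=False, linkrepair=False):
        query = None
        if revision:
            query = { 'rev': revision }
        if linkrev:
            query = { 'linkrev': linkrev }
        if expand:
            query = { 'expand': 1 }
        if linkrepair:
            query = { 'emptylink': 1 }
        return query

    print build_query(revision='42')                # {'rev': '42'}
    print build_query(revision='42', expand=True)   # {'expand': 1} -- the rev is dropped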
@@ -1843,7 +1881,6 @@ def show_upstream_xsrcmd5(apiurl, prj, pac):
 
     if li.haserror():
         raise oscerr.LinkExpandError, li.error
-    else:
-        return li.xsrcmd5
+    return li.xsrcmd5
 
 
@@ -2387,10 +2424,10 @@ def checkout_package(apiurl, project, package,
 
     if expand_link:
         # try to read from the linkinfo
-        x = show_upstream_xsrcmd5(apiurl, project, package)
-        if x:
-            # it is a link - thus, we use the xsrcmd5 as the revision to be
-            # checked out
-            revision = x
+        # if it is a link we use the xsrcmd5 as the revision to be
+        # checked out
+        x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
+        if x:
+            revision = x
     os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
     init_package_dir(apiurl, project, package, store, revision)
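This hunk is the core of the second commit-message item: checkout_package() now passes the requested revision on to show_upstream_xsrcmd5(), so for a linked package the files listing of that revision is fetched and its linkinfo xsrcmd5 becomes the revision that actually gets checked out. A stand-alone illustration of that extraction step, using a fabricated files listing (the XML values are made up):

    # Illustrative only: extracting the xsrcmd5 the way show_upstream_xsrcmd5()
    # does; the files listing below is a fabricated example.
    from StringIO import StringIO
    import xml.etree.cElementTree as ET

    files_listing = '''<directory name="pkg" srcmd5="1111" rev="7">
      <linkinfo project="openSUSE:Factory" package="pkg" xsrcmd5="2222" lsrcmd5="3333"/>
      <entry name="pkg.spec" md5="4444"/>
    </directory>'''

    li_node = ET.parse(StringIO(files_listing)).getroot().find('linkinfo')
    print li_node.get('xsrcmd5')   # -> 2222, used by 'osc co -r <rev>' as the checkout revision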
@@ -2986,6 +3023,10 @@ def store_read_apiurl(dir):
     #store_write_apiurl(dir, apiurl)
     return apiurl
 
+def store_write_string(dir, file, string):
+    fname = os.path.join(dir, store, file)
+    open(fname, 'w').write(string)
+
 def store_write_project(dir, project):
     fname = os.path.join(dir, store, '_project')
     open(fname, 'w').write(project + '\n')