Mirror of https://github.com/openSUSE/osc.git (synced 2025-01-12 08:56:13 +01:00)

Merge pull request #1536 from dmach/split-obs_scm

Move most of the OBS SCM code to obs_scm submodule

This commit is contained in: commit c39b648615

osc/core.py | 2684 lines changed (diff suppressed because it is too large)
osc/obs_scm/__init__.py | 6 lines (new file)
@@ -0,0 +1,6 @@

from .file import File
from .linkinfo import Linkinfo
from .package import Package
from .project import Project
from .serviceinfo import Serviceinfo
from .store import Store
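With these re-exports in place, callers can import the classes from the package itself rather than from the individual submodules. A small illustration (assuming osc with this change is on the import path):

    from osc.obs_scm import File, Linkinfo, Package, Project, Serviceinfo, Store
    from osc.obs_scm.file import File as FileFromSubmodule

    # the package-level name and the submodule name resolve to the same object
    assert File is FileFromSubmodule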
osc/obs_scm/file.py | 57 lines (new file)
@@ -0,0 +1,57 @@

from functools import total_ordering

from ..util.xml import ET


@total_ordering
class File:
    """represent a file, including its metadata"""

    def __init__(self, name, md5, size, mtime, skipped=False):
        self.name = name
        self.md5 = md5
        self.size = size
        self.mtime = mtime
        self.skipped = skipped

    def __repr__(self):
        return self.name

    def __str__(self):
        return self.name

    def __eq__(self, other):
        if isinstance(other, str):
            return self.name == other
        self_data = (self.name, self.md5, self.size, self.mtime, self.skipped)
        other_data = (other.name, other.md5, other.size, other.mtime, other.skipped)
        return self_data == other_data

    def __lt__(self, other):
        self_data = (self.name, self.md5, self.size, self.mtime, self.skipped)
        other_data = (other.name, other.md5, other.size, other.mtime, other.skipped)
        return self_data < other_data

    @classmethod
    def from_xml_node(cls, node):
        assert node.tag == "entry"
        kwargs = {
            "name": node.get("name"),
            "md5": node.get("md5"),
            "size": int(node.get("size")),
            "mtime": int(node.get("mtime")),
            "skipped": "skipped" in node.attrib,
        }
        return cls(**kwargs)

    def to_xml_node(self, parent_node):
        attributes = {
            "name": self.name,
            "md5": self.md5,
            "size": str(int(self.size)),
            "mtime": str(int(self.mtime)),
        }
        if self.skipped:
            attributes["skipped"] = "true"
        new_node = ET.SubElement(parent_node, "entry", attributes)
        return new_node
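A quick round-trip sketch of the class above (illustrative values only; assumes osc with this change is importable and that osc's ET wrapper behaves like the standard ElementTree for these calls):

    import xml.etree.ElementTree as ET
    from osc.obs_scm import File

    entry = ET.fromstring('<entry name="foo.spec" md5="d41d8cd98f00b204e9800998ecf8427e" size="0" mtime="1700000000"/>')
    f = File.from_xml_node(entry)          # parses name/md5, casts size and mtime to int
    print(f, f.size, f.skipped)            # foo.spec 0 False

    directory = ET.Element("directory")
    f.to_xml_node(directory)               # appends an equivalent <entry> under <directory>
    print(ET.tostring(directory).decode())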
osc/obs_scm/linkinfo.py | 63 lines (new file)
@@ -0,0 +1,63 @@

class Linkinfo:
    """linkinfo metadata (which is part of the xml representing a directory)
    """

    def __init__(self):
        """creates an empty linkinfo instance"""
        self.project = None
        self.package = None
        self.xsrcmd5 = None
        self.lsrcmd5 = None
        self.srcmd5 = None
        self.error = None
        self.rev = None
        self.baserev = None

    def read(self, linkinfo_node):
        """read in the linkinfo metadata from the ``<linkinfo>`` element passed as
        elementtree node.
        If the passed element is ``None``, the method does nothing.
        """
        if linkinfo_node is None:
            return
        self.project = linkinfo_node.get('project')
        self.package = linkinfo_node.get('package')
        self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
        self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
        self.srcmd5 = linkinfo_node.get('srcmd5')
        self.error = linkinfo_node.get('error')
        self.rev = linkinfo_node.get('rev')
        self.baserev = linkinfo_node.get('baserev')

    def islink(self):
        """:return: ``True`` if the linkinfo is not empty, otherwise ``False``"""
        if self.xsrcmd5 or self.lsrcmd5 or self.error is not None:
            return True
        return False

    def isexpanded(self):
        """:return: ``True`` if the package is an expanded link"""
        if self.lsrcmd5 and not self.xsrcmd5:
            return True
        return False

    def haserror(self):
        """:return: ``True`` if the link is in error state (could not be applied)"""
        if self.error:
            return True
        return False

    def __str__(self):
        """return an informatory string representation"""
        if self.islink() and not self.isexpanded():
            return 'project %s, package %s, xsrcmd5 %s, rev %s' \
                % (self.project, self.package, self.xsrcmd5, self.rev)
        elif self.islink() and self.isexpanded():
            if self.haserror():
                return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
                    % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
            else:
                return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
                    % (self.project, self.package, self.srcmd5, self.lsrcmd5)
        else:
            return 'None'
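A short usage sketch of Linkinfo (made-up attribute values): read() copies the attributes of a <linkinfo> element, and the is*/has* helpers classify the link state.

    import xml.etree.ElementTree as ET
    from osc.obs_scm import Linkinfo

    node = ET.fromstring('<linkinfo project="openSUSE:Factory" package="osc" xsrcmd5="abc123" rev="42"/>')
    li = Linkinfo()
    li.read(node)
    print(li.islink(), li.isexpanded(), li.haserror())   # True False False
    print(li)   # project openSUSE:Factory, package osc, xsrcmd5 abc123, rev 42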
osc/obs_scm/package.py | 1588 lines (new file, diff suppressed because it is too large)
osc/obs_scm/project.py | 626 lines (new file)
@@ -0,0 +1,626 @@

import fnmatch
import os
from pathlib import Path
from typing import Optional

from .. import conf
from .. import oscerr
from ..util.xml import ET
from .store import Store
from .store import delete_storedir
from .store import store
from .store import store_read_package
from .store import store_read_project
from .store import store_write_initial_packages
from .store import store_write_project
from .store import store_write_string
from .store import is_package_dir


class Project:
    """
    Represent a checked out project directory, holding packages.

    :Attributes:
        ``dir``
            The directory path containing the project.

        ``name``
            The name of the project.

        ``apiurl``
            The endpoint URL of the API server.

        ``pacs_available``
            List of names of packages available server-side.
            This is only populated if ``getPackageList`` is set
            to ``True`` in the constructor.

        ``pacs_have``
            List of names of packages which exist server-side
            and exist in the local project working copy (if
            'do_package_tracking' is disabled).
            If 'do_package_tracking' is enabled it represents the
            list names of packages which are tracked in the project
            working copy (that is it might contain packages which
            exist on the server as well as packages which do not
            exist on the server (for instance if the local package
            was added or if the package was removed on the server-side)).

        ``pacs_excluded``
            List of names of packages in the local project directory
            which are excluded by the `exclude_glob` configuration
            variable. Only set if `do_package_tracking` is enabled.

        ``pacs_unvers``
            List of names of packages in the local project directory
            which are not tracked. Only set if `do_package_tracking`
            is enabled.

        ``pacs_broken``
            List of names of packages which are tracked but do not
            exist in the local project working copy. Only set if
            `do_package_tracking` is enabled.

        ``pacs_missing``
            List of names of packages which exist server-side but
            are not expected to exist in the local project directory.
    """

    REQ_STOREFILES = ('_project', '_apiurl')

    def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
        """
        Constructor.

        :Parameters:
            `dir` : str
                The directory path containing the checked out project.

            `getPackageList` : bool
                Set to `False` if you want to skip retrieval from the
                server of the list of packages in the project .

            `wc_check` : bool
        """
        from ..core import meta_get_packagelist

        self.dir = Path(dir)
        self.absdir = os.path.abspath(dir)
        self.store = Store(dir)
        self.progress_obj = progress_obj

        self.name = store_read_project(self.dir)
        self.scm_url = self.store.scmurl
        self.apiurl = self.store.apiurl

        dirty_files = []
        if wc_check:
            dirty_files = self.wc_check()
        if dirty_files:
            msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
                'Please run \'osc repairwc %s\' and check the state\n' \
                'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
            raise oscerr.WorkingCopyInconsistent(self.name, None, dirty_files, msg)

        if getPackageList:
            self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
        else:
            self.pacs_available = []

        if conf.config['do_package_tracking']:
            self.pac_root = self.read_packages().getroot()
            self.pacs_have = [pac.get('name') for pac in self.pac_root.findall('package')]
            self.pacs_excluded = [i for i in os.listdir(self.dir)
                                  for j in conf.config['exclude_glob']
                                  if fnmatch.fnmatch(i, j)]
            self.pacs_unvers = [i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded]
            # store all broken packages (e.g. packages which where removed by a non-osc cmd)
            # in the self.pacs_broken list
            self.pacs_broken = []
            for p in self.pacs_have:
                if not os.path.isdir(os.path.join(self.absdir, p)):
                    # all states will be replaced with the '!'-state
                    # (except it is already marked as deleted ('D'-state))
                    self.pacs_broken.append(p)
        else:
            self.pacs_have = [i for i in os.listdir(self.dir) if i in self.pacs_available]

        self.pacs_missing = [i for i in self.pacs_available if i not in self.pacs_have]

    def wc_check(self):
        global store
        dirty_files = []
        req_storefiles = Project.REQ_STOREFILES
        if conf.config['do_package_tracking'] and self.scm_url is None:
            req_storefiles += ('_packages',)
        for fname in req_storefiles:
            if not os.path.exists(os.path.join(self.absdir, store, fname)):
                dirty_files.append(fname)
        return dirty_files

    def wc_repair(self, apiurl: Optional[str] = None):
        store = Store(self.dir)
        store.assert_is_project()
        if not store.exists("_apiurl") or apiurl:
            if apiurl is None:
                msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
                    'no \'apiurl\' was passed to wc_repair'
                # hmm should we raise oscerr.WrongArgs?
                raise oscerr.WorkingCopyInconsistent(self.name, None, [], msg)
            # sanity check
            conf.parse_apisrv_url(None, apiurl)
            store.apiurl = apiurl
            self.apiurl = apiurl

    def checkout_missing_pacs(self, sinfos, expand_link=False, unexpand_link=False):
        from ..core import checkout_package
        from ..core import getTransActPath

        for pac in self.pacs_missing:
            if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
                # pac is not under version control but a local file/dir exists
                msg = f'can\'t add package \'{pac}\': Object already exists'
                raise oscerr.PackageExists(self.name, pac, msg)

            if not (expand_link or unexpand_link):
                sinfo = sinfos.get(pac)
                if sinfo is None:
                    # should never happen...
                    continue
                linked = sinfo.find('linked')
                if linked is not None and linked.get('project') == self.name:
                    # hmm what about a linkerror (sinfo.get('lsrcmd5') is None)?
                    # Should we skip the package as well or should we it out?
                    # let's skip it for now
                    print(f"Skipping {pac} (link to package {linked.get('package')})")
                    continue

            print(f'checking out new package {pac}')
            checkout_package(self.apiurl, self.name, pac,
                             pathname=getTransActPath(os.path.join(self.dir, pac)),
                             prj_obj=self, prj_dir=self.dir,
                             expand_link=expand_link or not unexpand_link, progress_obj=self.progress_obj)

    def status(self, pac: str):
        exists = os.path.exists(os.path.join(self.absdir, pac))
        st = self.get_state(pac)
        if st is None and exists:
            return '?'
        elif st is None:
            raise oscerr.OscIOError(None, f'osc: \'{pac}\' is not under version control')
        elif st in ('A', ' ') and not exists:
            return '!'
        elif st == 'D' and not exists:
            return 'D'
        else:
            return st

    def get_status(self, *exclude_states):
        res = []
        for pac in self.pacs_have:
            st = self.status(pac)
            if st not in exclude_states:
                res.append((st, pac))
        if '?' not in exclude_states:
            res.extend([('?', pac) for pac in self.pacs_unvers])
        return res

    def get_pacobj(self, pac, *pac_args, **pac_kwargs):
        from ..core import Package

        try:
            st = self.status(pac)
            if st in ('?', '!') or st == 'D' and not os.path.exists(os.path.join(self.dir, pac)):
                return None
            return Package(os.path.join(self.dir, pac), *pac_args, **pac_kwargs)
        except oscerr.OscIOError:
            return None

    def set_state(self, pac, state):
        node = self.get_package_node(pac)
        if node is None:
            self.new_package_entry(pac, state)
        else:
            node.set('state', state)

    def get_package_node(self, pac: str):
        for node in self.pac_root.findall('package'):
            if pac == node.get('name'):
                return node
        return None

    def del_package_node(self, pac):
        for node in self.pac_root.findall('package'):
            if pac == node.get('name'):
                self.pac_root.remove(node)

    def get_state(self, pac: str):
        node = self.get_package_node(pac)
        if node is not None:
            return node.get('state')
        else:
            return None

    def new_package_entry(self, name, state):
        ET.SubElement(self.pac_root, 'package', name=name, state=state)

    def read_packages(self):
        """
        Returns an ``xml.etree.ElementTree`` object representing the
        parsed contents of the project's ``.osc/_packages`` XML file.
        """
        from ..core import Package
        from ..core import meta_get_packagelist

        global store

        packages_file = os.path.join(self.absdir, store, '_packages')
        if os.path.isfile(packages_file) and os.path.getsize(packages_file):
            try:
                result = ET.parse(packages_file)
            except:
                msg = f'Cannot read package file \'{packages_file}\'. '
                msg += 'You can try to remove it and then run osc repairwc.'
                raise oscerr.OscIOError(None, msg)
            return result
        else:
            # scan project for existing packages and migrate them
            cur_pacs = []
            for data in os.listdir(self.dir):
                pac_dir = os.path.join(self.absdir, data)
                # we cannot use self.pacs_available because we cannot guarantee that the package list
                # was fetched from the server
                if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
                        and Package(pac_dir).name == data:
                    cur_pacs.append(ET.Element('package', name=data, state=' '))
            store_write_initial_packages(self.absdir, self.name, cur_pacs)
            return ET.parse(os.path.join(self.absdir, store, '_packages'))

    def write_packages(self):
        from ..core import ET_ENCODING
        from ..core import xmlindent

        xmlindent(self.pac_root)
        store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root, encoding=ET_ENCODING))

    def addPackage(self, pac):
        for i in conf.config['exclude_glob']:
            if fnmatch.fnmatch(pac, i):
                msg = f'invalid package name: \'{pac}\' (see \'exclude_glob\' config option)'
                raise oscerr.OscIOError(None, msg)
        state = self.get_state(pac)
        if state is None or state == 'D':
            self.new_package_entry(pac, 'A')
            self.write_packages()
            # sometimes the new pac doesn't exist in the list because
            # it would take too much time to update all data structs regularly
            if pac in self.pacs_unvers:
                self.pacs_unvers.remove(pac)
        else:
            raise oscerr.PackageExists(self.name, pac, f'package \'{pac}\' is already under version control')

    def delPackage(self, pac, force=False):
        from ..core import delete_dir
        from ..core import getTransActPath
        from ..core import statfrmt

        state = self.get_state(pac.name)
        can_delete = True
        if state == ' ' or state == 'D':
            del_files = []
            for filename in pac.filenamelist + pac.filenamelist_unvers:
                filestate = pac.status(filename)
                if filestate == 'M' or filestate == 'C' or \
                   filestate == 'A' or filestate == '?':
                    can_delete = False
                else:
                    del_files.append(filename)
            if can_delete or force:
                for filename in del_files:
                    pac.delete_localfile(filename)
                    if pac.status(filename) != '?':
                        # this is not really necessary
                        pac.put_on_deletelist(filename)
                        print(statfrmt('D', getTransActPath(os.path.join(pac.dir, filename))))
                print(statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name))))
                pac.write_deletelist()
                self.set_state(pac.name, 'D')
                self.write_packages()
            else:
                print(f'package \'{pac.name}\' has local modifications (see osc st for details)')
        elif state == 'A':
            if force:
                delete_dir(pac.absdir)
                self.del_package_node(pac.name)
                self.write_packages()
                print(statfrmt('D', pac.name))
            else:
                print(f'package \'{pac.name}\' has local modifications (see osc st for details)')
        elif state is None:
            print('package is not under version control')
        else:
            print('unsupported state')

    def update(self, pacs=(), expand_link=False, unexpand_link=False, service_files=False):
        from ..core import Package
        from ..core import checkout_package
        from ..core import get_project_sourceinfo
        from ..core import getTransActPath
        from ..core import show_upstream_xsrcmd5

        if pacs:
            for pac in pacs:
                Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
        else:
            # we need to make sure that the _packages file will be written (even if an exception
            # occurs)
            try:
                # update complete project
                # packages which no longer exists upstream
                upstream_del = [pac for pac in self.pacs_have if pac not in self.pacs_available and self.get_state(pac) != 'A']
                sinfo_pacs = [pac for pac in self.pacs_have if self.get_state(pac) in (' ', 'D') and pac not in self.pacs_broken]
                sinfo_pacs.extend(self.pacs_missing)
                sinfos = get_project_sourceinfo(self.apiurl, self.name, True, *sinfo_pacs)

                for pac in upstream_del:
                    if self.status(pac) != '!':
                        p = Package(os.path.join(self.dir, pac))
                        self.delPackage(p, force=True)
                        delete_storedir(p.storedir)
                        try:
                            os.rmdir(pac)
                        except:
                            pass
                    self.pac_root.remove(self.get_package_node(pac))
                    self.pacs_have.remove(pac)

                for pac in self.pacs_have:
                    state = self.get_state(pac)
                    if pac in self.pacs_broken:
                        if self.get_state(pac) != 'A':
                            checkout_package(self.apiurl, self.name, pac,
                                             pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self,
                                             prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
                    elif state == ' ':
                        # do a simple update
                        p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
                        rev = None
                        needs_update = True
                        if p.scm_url is not None:
                            # git managed.
                            print("Skipping git managed package ", pac)
                            continue
                        elif expand_link and p.islink() and not p.isexpanded():
                            if p.haslinkerror():
                                try:
                                    rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
                                except:
                                    rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
                                    p.mark_frozen()
                            else:
                                rev = p.linkinfo.xsrcmd5
                            print('Expanding to rev', rev)
                        elif unexpand_link and p.islink() and p.isexpanded():
                            rev = p.linkinfo.lsrcmd5
                            print('Unexpanding to rev', rev)
                        elif p.islink() and p.isexpanded():
                            needs_update = p.update_needed(sinfos[p.name])
                            if needs_update:
                                rev = p.latest_rev()
                        elif p.hasserviceinfo() and p.serviceinfo.isexpanded() and not service_files:
                            # FIXME: currently, do_update does not propagate the --server-side-source-service-files
                            # option to this method. Consequence: an expanded service is always unexpanded during
                            # an update (TODO: discuss if this is a reasonable behavior (at least this the default
                            # behavior for a while))
                            needs_update = True
                        else:
                            needs_update = p.update_needed(sinfos[p.name])
                        print(f'Updating {p.name}')
                        if needs_update:
                            p.update(rev, service_files)
                        else:
                            print(f'At revision {p.rev}.')
                        if unexpand_link:
                            p.unmark_frozen()
                    elif state == 'D':
                        # pac exists (the non-existent pac case was handled in the first if block)
                        p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
                        if p.update_needed(sinfos[p.name]):
                            p.update()
                    elif state == 'A' and pac in self.pacs_available:
                        # file/dir called pac already exists and is under version control
                        msg = f'can\'t add package \'{pac}\': Object already exists'
                        raise oscerr.PackageExists(self.name, pac, msg)
                    elif state == 'A':
                        # do nothing
                        pass
                    else:
                        print(f'unexpected state.. package \'{pac}\'')

                self.checkout_missing_pacs(sinfos, expand_link, unexpand_link)
            finally:
                self.write_packages()

    def commit(self, pacs=(), msg='', files=None, verbose=False, skip_local_service_run=False, can_branch=False, force=False):
        from ..core import Package
        from ..core import os_path_samefile

        files = files or {}
        if pacs:
            try:
                for pac in pacs:
                    todo = []
                    if pac in files:
                        todo = files[pac]
                    state = self.get_state(pac)
                    if state == 'A':
                        self.commitNewPackage(pac, msg, todo, verbose=verbose, skip_local_service_run=skip_local_service_run)
                    elif state == 'D':
                        self.commitDelPackage(pac, force=force)
                    elif state == ' ':
                        # display the correct dir when sending the changes
                        if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
                            p = Package('.')
                        else:
                            p = Package(os.path.join(self.dir, pac))
                        p.todo = todo
                        p.commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run, can_branch=can_branch, force=force)
                    elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
                        print(f'osc: \'{pac}\' is not under version control')
                    elif pac in self.pacs_broken or not os.path.exists(os.path.join(self.dir, pac)):
                        print(f'osc: \'{pac}\' package not found')
                    elif state is None:
                        self.commitExtPackage(pac, msg, todo, verbose=verbose, skip_local_service_run=skip_local_service_run)
            finally:
                self.write_packages()
        else:
            # if we have packages marked as '!' we cannot commit
            for pac in self.pacs_broken:
                if self.get_state(pac) != 'D':
                    msg = f'commit failed: package \'{pac}\' is missing'
                    raise oscerr.PackageMissing(self.name, pac, msg)
            try:
                for pac in self.pacs_have:
                    state = self.get_state(pac)
                    if state == ' ':
                        # do a simple commit
                        Package(os.path.join(self.dir, pac)).commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
                    elif state == 'D':
                        self.commitDelPackage(pac, force=force)
                    elif state == 'A':
                        self.commitNewPackage(pac, msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
            finally:
                self.write_packages()

    def commitNewPackage(self, pac, msg='', files=None, verbose=False, skip_local_service_run=False):
        """creates and commits a new package if it does not exist on the server"""
        from ..core import Package
        from ..core import edit_meta
        from ..core import os_path_samefile
        from ..core import statfrmt

        files = files or []
        if pac in self.pacs_available:
            print(f'package \'{pac}\' already exists')
        else:
            user = conf.get_apiurl_usr(self.apiurl)
            edit_meta(metatype='pkg',
                      path_args=(self.name, pac),
                      template_args=({
                          'name': pac,
                          'user': user}),
                      apiurl=self.apiurl)
            # display the correct dir when sending the changes
            olddir = os.getcwd()
            if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
                os.chdir(os.pardir)
                p = Package(pac)
            else:
                p = Package(os.path.join(self.dir, pac))
            p.todo = files
            print(statfrmt('Sending', os.path.normpath(p.dir)))
            p.commit(msg=msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
            self.set_state(pac, ' ')
            os.chdir(olddir)

    def commitDelPackage(self, pac, force=False):
        """deletes a package on the server and in the working copy"""

        from ..core import Package
        from ..core import delete_package
        from ..core import getTransActPath
        from ..core import os_path_samefile
        from ..core import statfrmt

        try:
            # display the correct dir when sending the changes
            if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
                pac_dir = pac
            else:
                pac_dir = os.path.join(self.dir, pac)
            p = Package(os.path.join(self.dir, pac))
            # print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
            delete_storedir(p.storedir)
            try:
                os.rmdir(p.dir)
            except:
                pass
        except OSError:
            pac_dir = os.path.join(self.dir, pac)
        except (oscerr.NoWorkingCopy, oscerr.WorkingCopyOutdated, oscerr.PackageError):
            pass
        # print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
        print(statfrmt('Deleting', getTransActPath(pac_dir)))
        delete_package(self.apiurl, self.name, pac, force=force)
        self.del_package_node(pac)

    def commitExtPackage(self, pac, msg, files=None, verbose=False, skip_local_service_run=False):
        """commits a package from an external project"""

        from ..core import Package
        from ..core import edit_meta
        from ..core import meta_exists
        from ..core import os_path_samefile

        files = files or []
        if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
            pac_path = '.'
        else:
            pac_path = os.path.join(self.dir, pac)

        store = Store(pac_path)
        project = store_read_project(pac_path)
        package = store_read_package(pac_path)
        apiurl = store.apiurl
        if not meta_exists(metatype='pkg',
                           path_args=(project, package),
                           template_args=None, create_new=False, apiurl=apiurl):
            user = conf.get_apiurl_usr(self.apiurl)
            edit_meta(metatype='pkg',
                      path_args=(project, package),
                      template_args=({'name': pac, 'user': user}), apiurl=apiurl)
        p = Package(pac_path)
        p.todo = files
        p.commit(msg=msg, verbose=verbose, skip_local_service_run=skip_local_service_run)

    def __str__(self):
        r = []
        r.append('*****************************************************')
        r.append(f'Project {self.name} (dir={self.dir}, absdir={self.absdir})')
        r.append(f"have pacs:\n{', '.join(self.pacs_have)}")
        r.append(f"missing pacs:\n{', '.join(self.pacs_missing)}")
        r.append('*****************************************************')
        return '\n'.join(r)

    @staticmethod
    def init_project(
        apiurl: str,
        dir: Path,
        project,
        package_tracking=True,
        getPackageList=True,
        progress_obj=None,
        wc_check=True,
        scm_url=None,
    ):
        global store

        if not os.path.exists(dir):
            # use makedirs (checkout_no_colon config option might be enabled)
            os.makedirs(dir)
        elif not os.path.isdir(dir):
            raise oscerr.OscIOError(None, f'error: \'{dir}\' is no directory')
        if os.path.exists(os.path.join(dir, store)):
            raise oscerr.OscIOError(None, f'error: \'{dir}\' is already an initialized osc working copy')
        else:
            os.mkdir(os.path.join(dir, store))

        store_write_project(dir, project)
        Store(dir).apiurl = apiurl
        if scm_url:
            Store(dir).scmurl = scm_url
            package_tracking = None
        if package_tracking:
            store_write_initial_packages(dir, project, [])
        return Project(dir, getPackageList, progress_obj, wc_check)
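A minimal usage sketch (the checkout directory and API URL are example values, not taken from the diff): the constructor runs wc_check() by default and raises WorkingCopyInconsistent when required store files are missing; wc_repair() can recreate a missing _apiurl.

    from osc import oscerr
    from osc.obs_scm import Project

    try:
        prj = Project("home:user", getPackageList=False)
    except oscerr.WorkingCopyInconsistent:
        # construct without the consistency check, rewrite .osc/_apiurl, then retry
        prj = Project("home:user", getPackageList=False, wc_check=False)
        prj.wc_repair(apiurl="https://api.opensuse.org")
        prj = Project("home:user", getPackageList=False)

    print(prj.name, prj.apiurl)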
osc/obs_scm/serviceinfo.py | 252 lines (new file)
@@ -0,0 +1,252 @@

import hashlib
import os
import shutil
import tempfile
import time
from typing import Optional
from urllib.error import HTTPError
from urllib.parse import urlparse

from .. import oscerr
from .. import output
from ..util.xml import ET


class Serviceinfo:
    """Source service content
    """

    def __init__(self):
        """creates an empty serviceinfo instance"""
        self.services = []
        self.apiurl: Optional[str] = None
        self.project: Optional[str] = None
        self.package: Optional[str] = None

    def read(self, serviceinfo_node, append=False):
        """read in the source services ``<services>`` element passed as
        elementtree node.
        """
        def error(msg, xml):
            from ..core import ET_ENCODING
            data = f'invalid service format:\n{ET.tostring(xml, encoding=ET_ENCODING)}'
            raise ValueError(f"{data}\n\n{msg}")

        if serviceinfo_node is None:
            return
        if not append:
            self.services = []
        services = serviceinfo_node.findall('service')

        for service in services:
            name = service.get('name')
            if name is None:
                error("invalid service definition. Attribute name missing.", service)
            if len(name) < 3 or '/' in name:
                error(f"invalid service name: {name}", service)
            mode = service.get('mode', '')
            data = {'name': name, 'mode': mode}
            command = [name]
            for param in service.findall('param'):
                option = param.get('name')
                if option is None:
                    error(f"{name}: a parameter requires a name", service)
                value = ''
                if param.text:
                    value = param.text
                command.append('--' + option)
                # hmm is this reasonable or do we want to allow real
                # options (e.g., "--force" (without an argument)) as well?
                command.append(value)
            data['command'] = command
            self.services.append(data)

    def getProjectGlobalServices(self, apiurl: str, project: str, package: str):
        from ..core import http_POST
        from ..core import makeurl

        self.apiurl = apiurl
        # get all project wide services in one file, we don't store it yet
        u = makeurl(apiurl, ["source", project, package], query={"cmd": "getprojectservices"})
        try:
            f = http_POST(u)
            root = ET.parse(f).getroot()
            self.read(root, True)
            self.project = project
            self.package = package
        except HTTPError as e:
            if e.code == 404 and package != '_project':
                self.getProjectGlobalServices(apiurl, project, '_project')
                self.package = package
            elif e.code != 403 and e.code != 400:
                raise e

    def addVerifyFile(self, serviceinfo_node, filename: str):
        f = open(filename, 'rb')
        digest = hashlib.sha256(f.read()).hexdigest()
        f.close()

        r = serviceinfo_node
        s = ET.Element("service", name="verify_file")
        ET.SubElement(s, "param", name="file").text = filename
        ET.SubElement(s, "param", name="verifier").text = "sha256"
        ET.SubElement(s, "param", name="checksum").text = digest

        r.append(s)
        return r

    def addDownloadUrl(self, serviceinfo_node, url_string: str):
        url = urlparse(url_string)
        protocol = url.scheme
        host = url.netloc
        path = url.path

        r = serviceinfo_node
        s = ET.Element("service", name="download_url")
        ET.SubElement(s, "param", name="protocol").text = protocol
        ET.SubElement(s, "param", name="host").text = host
        ET.SubElement(s, "param", name="path").text = path

        r.append(s)
        return r

    def addSetVersion(self, serviceinfo_node):
        r = serviceinfo_node
        s = ET.Element("service", name="set_version", mode="buildtime")
        r.append(s)
        return r

    def addGitUrl(self, serviceinfo_node, url_string: Optional[str]):
        r = serviceinfo_node
        s = ET.Element("service", name="obs_scm")
        ET.SubElement(s, "param", name="url").text = url_string
        ET.SubElement(s, "param", name="scm").text = "git"
        r.append(s)
        return r

    def addTarUp(self, serviceinfo_node):
        r = serviceinfo_node
        s = ET.Element("service", name="tar", mode="buildtime")
        r.append(s)
        return r

    def addRecompressTar(self, serviceinfo_node):
        r = serviceinfo_node
        s = ET.Element("service", name="recompress", mode="buildtime")
        ET.SubElement(s, "param", name="file").text = "*.tar"
        ET.SubElement(s, "param", name="compression").text = "xz"
        r.append(s)
        return r

    def execute(self, dir, callmode: Optional[str] = None, singleservice=None, verbose: Optional[bool] = None):
        old_dir = os.path.join(dir, '.old')

        # if 2 osc instances are executed at a time one, of them fails on .old file existence
        # sleep up to 10 seconds until we can create the directory
        for i in reversed(range(10)):
            try:
                os.mkdir(old_dir)
                break
            except FileExistsError:
                time.sleep(1)

        if i == 0:
            msg = f'"{old_dir}" exists, please remove it'
            raise oscerr.OscIOError(None, msg)

        try:
            result = self._execute(dir, old_dir, callmode, singleservice, verbose)
        finally:
            shutil.rmtree(old_dir)
        return result

    def _execute(
        self, dir, old_dir, callmode: Optional[str] = None, singleservice=None, verbose: Optional[bool] = None
    ):
        from ..core import get_osc_version
        from ..core import run_external
        from ..core import vc_export_env

        # cleanup existing generated files
        for filename in os.listdir(dir):
            if filename.startswith('_service:') or filename.startswith('_service_'):
                os.rename(os.path.join(dir, filename),
                          os.path.join(old_dir, filename))

        allservices = self.services or []
        service_names = [s['name'] for s in allservices]
        if singleservice and singleservice not in service_names:
            # set array to the manual specified singleservice, if it is not part of _service file
            data = {'name': singleservice, 'command': [singleservice], 'mode': callmode}
            allservices = [data]
        elif singleservice:
            allservices = [s for s in allservices if s['name'] == singleservice]
            # set the right called mode or the service would be skipped below
            for s in allservices:
                s['mode'] = callmode

        if not allservices:
            # short-circuit to avoid a potential http request in vc_export_env
            # (if there are no services to execute this http request is
            # useless)
            return 0

        # services can detect that they run via osc this way
        os.putenv("OSC_VERSION", get_osc_version())

        # set environment when using OBS 2.3 or later
        if self.project is not None:
            # These need to be kept in sync with bs_service
            os.putenv("OBS_SERVICE_APIURL", self.apiurl)
            os.putenv("OBS_SERVICE_PROJECT", self.project)
            os.putenv("OBS_SERVICE_PACKAGE", self.package)
            # also export vc env vars (some services (like obs_scm) use them)
            vc_export_env(self.apiurl)

        # recreate files
        ret = 0
        for service in allservices:
            if callmode != "all":
                if service['mode'] == "buildtime":
                    continue
                if service['mode'] == "serveronly" and callmode != "local":
                    continue
                if service['mode'] == "manual" and callmode != "manual":
                    continue
                if service['mode'] != "manual" and callmode == "manual":
                    continue
                if service['mode'] == "disabled" and callmode != "disabled":
                    continue
                if service['mode'] != "disabled" and callmode == "disabled":
                    continue
                if service['mode'] != "trylocal" and service['mode'] != "localonly" and callmode == "trylocal":
                    continue
            temp_dir = None
            try:
                temp_dir = tempfile.mkdtemp(dir=dir, suffix=f".{service['name']}.service")
                cmd = service['command']
                if not os.path.exists("/usr/lib/obs/service/" + cmd[0]):
                    raise oscerr.PackageNotInstalled(f"obs-service-{cmd[0]}")
                cmd[0] = "/usr/lib/obs/service/" + cmd[0]
                cmd = cmd + ["--outdir", temp_dir]
                output.print_msg("Run source service:", " ".join(cmd), print_to="verbose")
                r = run_external(*cmd)

                if r != 0:
                    print("Aborting: service call failed: ", ' '.join(cmd))
                    # FIXME: addDownloadUrlService calls si.execute after
                    # updating _services.
                    return r

                if service['mode'] == "manual" or service['mode'] == "disabled" or service['mode'] == "trylocal" or service['mode'] == "localonly" or callmode == "local" or callmode == "trylocal" or callmode == "all":
                    for filename in os.listdir(temp_dir):
                        os.rename(os.path.join(temp_dir, filename), os.path.join(dir, filename))
                else:
                    name = service['name']
                    for filename in os.listdir(temp_dir):
                        os.rename(os.path.join(temp_dir, filename), os.path.join(dir, "_service:" + name + ":" + filename))
            finally:
                if temp_dir is not None:
                    shutil.rmtree(temp_dir)

        return 0
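To illustrate how read() flattens a _service entry into an argv-style command list (the XML snippet mirrors what addGitUrl() above generates; this is a sketch, not part of the diff):

    import xml.etree.ElementTree as ET
    from osc.obs_scm import Serviceinfo

    services_xml = """
    <services>
      <service name="obs_scm">
        <param name="url">https://example.com/repo.git</param>
        <param name="scm">git</param>
      </service>
    </services>
    """

    si = Serviceinfo()
    si.read(ET.fromstring(services_xml))
    print(si.services)
    # [{'name': 'obs_scm', 'mode': '',
    #   'command': ['obs_scm', '--url', 'https://example.com/repo.git', '--scm', 'git']}]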
osc/obs_scm/store.py | 566 lines (new file)
@@ -0,0 +1,566 @@

"""
Store class wraps access to files in the '.osc' directory.
It is meant to be used as an implementation detail of Project and Package classes
and shouldn't be used in any code outside osc.
"""


import os

from .. import oscerr
from .._private import api
from ..util.xml import ET


# __store_version__ is to be incremented when the format of the working copy
# "store" changes in an incompatible way. Please add any needed migration
# functionality to check_store_version().
__store_version__ = '1.0'


class Store:
    STORE_DIR = ".osc"
    STORE_VERSION = "1.0"

    @classmethod
    def is_project_dir(cls, path):
        try:
            store = cls(path)
        except oscerr.NoWorkingCopy:
            return False
        return store.is_project

    @classmethod
    def is_package_dir(cls, path):
        try:
            store = cls(path)
        except oscerr.NoWorkingCopy:
            return False
        return store.is_package

    def __init__(self, path, check=True):
        self.path = path
        self.abspath = os.path.abspath(self.path)

        self.is_project = self.exists("_project") and not self.exists("_package")
        self.is_package = self.exists("_project") and self.exists("_package")

        if check and not any([self.is_project, self.is_package]):
            msg = f"Directory '{self.path}' is not an OBS SCM working copy"
            raise oscerr.NoWorkingCopy(msg)

    def __contains__(self, fn):
        return self.exists(fn)

    def __iter__(self):
        path = os.path.join(self.abspath, self.STORE_DIR)
        yield from os.listdir(path)

    def assert_is_project(self):
        if not self.is_project:
            msg = f"Directory '{self.path}' is not an OBS SCM working copy of a project"
            raise oscerr.NoWorkingCopy(msg)

    def assert_is_package(self):
        if not self.is_package:
            msg = f"Directory '{self.path}' is not an OBS SCM working copy of a package"
            raise oscerr.NoWorkingCopy(msg)

    def get_path(self, fn, subdir=None):
        # sanitize input to ensure that joining path works as expected
        fn = fn.lstrip("/")
        if subdir:
            subdir = subdir.lstrip("/")
            return os.path.join(self.abspath, self.STORE_DIR, subdir, fn)
        return os.path.join(self.abspath, self.STORE_DIR, fn)

    def exists(self, fn, subdir=None):
        return os.path.exists(self.get_path(fn, subdir=subdir))

    def unlink(self, fn, subdir=None):
        try:
            os.unlink(self.get_path(fn, subdir=subdir))
        except FileNotFoundError:
            pass

    def read_file(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return f.read()

    def write_file(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        try:
            if subdir:
                os.makedirs(self.get_path(subdir))
            else:
                os.makedirs(self.get_path(""))
        except FileExistsError:
            pass

        old = self.get_path(fn, subdir=subdir)
        new = self.get_path(f"{fn}.new", subdir=subdir)
        try:
            with open(new, "w", encoding="utf-8") as f:
                f.write(value)
            os.rename(new, old)
        except:
            if os.path.exists(new):
                os.unlink(new)
            raise

    def read_list(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return [line.rstrip("\n") for line in f]

    def write_list(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if not isinstance(value, (list, tuple)):
            msg = f"The argument `value` should be list, not {type(value).__name__}"
            raise TypeError(msg)
        value = "".join((f"{line or ''}\n" for line in value))
        self.write_file(fn, value, subdir=subdir)

    def read_string(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return f.readline().strip()

    def write_string(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        if not isinstance(value, str):
            msg = f"The argument `value` should be str, not {type(value).__name__}"
            raise TypeError(msg)
        self.write_file(fn, f"{value}\n", subdir=subdir)

    def read_int(self, fn):
        if not self.exists(fn):
            return None
        result = self.read_string(fn)
        if not result.isdigit():
            return None
        return int(result)

    def write_int(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if not isinstance(value, int):
            msg = f"The argument `value` should be int, not {type(value).__name__}"
            raise TypeError(msg)
        value = str(value)
        self.write_string(fn, value, subdir=subdir)

    def read_xml_node(self, fn, node_name, subdir=None):
        path = self.get_path(fn, subdir=subdir)
        try:
            tree = ET.parse(path)
        except SyntaxError as e:
            msg = f"Unable to parse '{path}': {e}"
            raise oscerr.NoWorkingCopy(msg)
        root = tree.getroot()
        assert root.tag == node_name
        # TODO: return root?
        return tree

    def write_xml_node(self, fn, node_name, node, subdir=None):
        path = self.get_path(fn, subdir=subdir)
        assert node.tag == node_name
        api.write_xml_node_to_file(node, path)

    def _sanitize_apiurl(self, value):
        # apiurl shouldn't end with a slash, strip it so we can use apiurl without modifications
        # in config['api_host_options'][apiurl] and other places
        if isinstance(value, str):
            value = value.strip("/")
        elif isinstance(value, bytes):
            value = value.strip(b"/")
        return value

    @property
    def apiurl(self):
        return self._sanitize_apiurl(self.read_string("_apiurl"))

    @apiurl.setter
    def apiurl(self, value):
        self.write_string("_apiurl", self._sanitize_apiurl(value))

    @property
    def project(self):
        return self.read_string("_project")

    @project.setter
    def project(self, value):
        self.write_string("_project", value)

    @property
    def package(self):
        return self.read_string("_package")

    @package.setter
    def package(self, value):
        self.write_string("_package", value)

    @property
    def scmurl(self):
        return self.read_string("_scm")

    @scmurl.setter
    def scmurl(self, value):
        return self.write_string("_scm", value)

    @property
    def size_limit(self):
        return self.read_int("_size_limit")

    @size_limit.setter
    def size_limit(self, value):
        return self.write_int("_size_limit", value)

    @property
    def to_be_added(self):
        self.assert_is_package()
        return self.read_list("_to_be_added") or []

    @to_be_added.setter
    def to_be_added(self, value):
        self.assert_is_package()
        return self.write_list("_to_be_added", value)

    @property
    def to_be_deleted(self):
        self.assert_is_package()
        return self.read_list("_to_be_deleted") or []

    @to_be_deleted.setter
    def to_be_deleted(self, value):
        self.assert_is_package()
        return self.write_list("_to_be_deleted", value)

    @property
    def in_conflict(self):
        self.assert_is_package()
        return self.read_list("_in_conflict") or []

    @in_conflict.setter
    def in_conflict(self, value):
        self.assert_is_package()
        return self.write_list("_in_conflict", value)

    @property
    def osclib_version(self):
        return self.read_string("_osclib_version")

    @property
    def files(self):
        from .. import core as osc_core

        self.assert_is_package()
        if self.exists("_scm"):
            msg = "Package '{self.path}' is managed via SCM"
            raise oscerr.NoWorkingCopy(msg)
        if not self.exists("_files"):
            msg = "Package '{self.path}' doesn't contain _files metadata"
            raise oscerr.NoWorkingCopy(msg)
        result = []
        directory_node = self.read_xml_node("_files", "directory").getroot()
        for entry_node in api.find_nodes(directory_node, "directory", "entry"):
            result.append(osc_core.File.from_xml_node(entry_node))
        return result

    @files.setter
    def files(self, value):
        if not isinstance(value, (list, tuple)):
            msg = f"The argument `value` should be list, not {type(value).__name__}"
            raise TypeError(msg)

        root = ET.Element("directory")
        for file_obj in sorted(value):
            file_obj.to_xml_node(root)
        self.write_xml_node("_files", "directory", root)

    @property
    def last_buildroot(self):
        self.assert_is_package()
        items = self.read_list("_last_buildroot")
        if items is None:
            return items

        if len(items) != 3:
            msg = f"Package '{self.path}' contains _last_buildroot metadata that doesn't contain 3 lines: [repo, arch, vm_type]"
            raise oscerr.NoWorkingCopy(msg)

        if items[2] in ("", "None"):
            items[2] = None

        return items

    @last_buildroot.setter
    def last_buildroot(self, value):
        self.assert_is_package()
        if len(value) != 3:
            raise ValueError("A list with exactly 3 items is expected: [repo, arch, vm_type]")
        self.write_list("_last_buildroot", value)

    @property
    def _meta_node(self):
        if not self.exists("_meta"):
            return None
        if self.is_package:
            root = self.read_xml_node("_meta", "package").getroot()
        else:
            root = self.read_xml_node("_meta", "project").getroot()
        return root


store = '.osc'


def check_store_version(dir):
    global store

    versionfile = os.path.join(dir, store, '_osclib_version')
    try:
        with open(versionfile) as f:
            v = f.read().strip()
    except:
        v = ''

    if v == '':
        msg = f'Error: "{os.path.abspath(dir)}" is not an osc package working copy.'
        if os.path.exists(os.path.join(dir, '.svn')):
            msg = msg + '\nTry svn instead of osc.'
        raise oscerr.NoWorkingCopy(msg)

    if v != __store_version__:
        if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
            # version is fine, no migration needed
            f = open(versionfile, 'w')
            f.write(__store_version__ + '\n')
            f.close()
            return
        msg = f'The osc metadata of your working copy "{dir}"'
        msg += f'\nhas __store_version__ = {v}, but it should be {__store_version__}'
        msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
        raise oscerr.WorkingCopyWrongVersion(msg)


def is_project_dir(d):
    global store

    return os.path.exists(os.path.join(d, store, '_project')) and not \
        os.path.exists(os.path.join(d, store, '_package'))


def is_package_dir(d):
    global store

    return os.path.exists(os.path.join(d, store, '_project')) and \
        os.path.exists(os.path.join(d, store, '_package'))


def read_filemeta(dir):
    global store

    msg = f'\'{dir}\' is not a valid working copy.'
    filesmeta = os.path.join(dir, store, '_files')
    if not is_package_dir(dir):
        raise oscerr.NoWorkingCopy(msg)
    if os.path.isfile(os.path.join(dir, store, '_scm')):
        raise oscerr.NoWorkingCopy("Is managed via scm")
    if not os.path.isfile(filesmeta):
        raise oscerr.NoWorkingCopy(f'{msg} ({filesmeta} does not exist)')

    try:
        r = ET.parse(filesmeta)
    except SyntaxError as e:
        raise oscerr.NoWorkingCopy(f'{msg}\nWhen parsing .osc/_files, the following error was encountered:\n{e}')
    return r


def store_readlist(dir, name):
    global store

    r = []
    if os.path.exists(os.path.join(dir, store, name)):
        with open(os.path.join(dir, store, name)) as f:
            r = [line.rstrip('\n') for line in f]
    return r


def read_tobeadded(dir):
    return store_readlist(dir, '_to_be_added')


def read_tobedeleted(dir):
    return store_readlist(dir, '_to_be_deleted')


def read_sizelimit(dir):
    global store

    r = None
    fname = os.path.join(dir, store, '_size_limit')

    if os.path.exists(fname):
        with open(fname) as f:
            r = f.readline().strip()

    if r is None or not r.isdigit():
        return None
    return int(r)


def read_inconflict(dir):
    return store_readlist(dir, '_in_conflict')


def store_read_project(dir):
    global store

    try:
        with open(os.path.join(dir, store, '_project')) as f:
            p = f.readline().strip()
    except OSError:
        msg = f'Error: \'{os.path.abspath(dir)}\' is not an osc project dir or working copy'
        if os.path.exists(os.path.join(dir, '.svn')):
            msg += '\nTry svn instead of osc.'
        raise oscerr.NoWorkingCopy(msg)
    return p


def store_read_package(dir):
    global store

    try:
        with open(os.path.join(dir, store, '_package')) as f:
            p = f.readline().strip()
    except OSError:
        msg = f'Error: \'{os.path.abspath(dir)}\' is not an osc package working copy'
        if os.path.exists(os.path.join(dir, '.svn')):
            msg += '\nTry svn instead of osc.'
        raise oscerr.NoWorkingCopy(msg)
    return p


def store_read_scmurl(dir):
    import warnings
    warnings.warn(
        "osc.core.store_read_scmurl() is deprecated. "
        "You should be using high-level classes such as Store, Project or Package instead.",
        DeprecationWarning
    )
    return Store(dir).scmurl


def store_read_apiurl(dir, defaulturl=True):
    import warnings
    warnings.warn(
        "osc.core.store_read_apiurl() is deprecated. "
        "You should be using high-level classes such as Store, Project or Package instead.",
        DeprecationWarning
    )
    return Store(dir).apiurl


def store_read_last_buildroot(dir):
    global store

    fname = os.path.join(dir, store, '_last_buildroot')
    if os.path.exists(fname):
        lines = open(fname).read().splitlines()
        if len(lines) == 3:
            return lines

    return


def store_write_string(dir, file, string, subdir=''):
    from ..core import decode_it

    global store

    if subdir and not os.path.isdir(os.path.join(dir, store, subdir)):
        os.mkdir(os.path.join(dir, store, subdir))
    fname = os.path.join(dir, store, subdir, file)
    try:
        f = open(fname + '.new', 'w')
        if not isinstance(string, str):
            string = decode_it(string)
        f.write(string)
        f.close()
        os.rename(fname + '.new', fname)
    except:
        if os.path.exists(fname + '.new'):
            os.unlink(fname + '.new')
        raise


def store_write_project(dir, project):
    store_write_string(dir, '_project', project + '\n')


def store_write_apiurl(dir, apiurl):
    import warnings
    warnings.warn(
        "osc.core.store_write_apiurl() is deprecated. "
        "You should be using high-level classes such as Store, Project or Package instead.",
        DeprecationWarning
    )
    Store(dir).apiurl = apiurl


def store_write_last_buildroot(dir, repo, arch, vm_type):
    store_write_string(dir, '_last_buildroot', repo + '\n' + arch + '\n' + vm_type + '\n')


def store_unlink_file(dir, file):
    global store

    try:
        os.unlink(os.path.join(dir, store, file))
    except:
        pass


def store_read_file(dir, file):
    global store

    try:
        with open(os.path.join(dir, store, file)) as f:
            return f.read()
    except:
        return None


def store_write_initial_packages(dir, project, subelements):
    global store

    fname = os.path.join(dir, store, '_packages')
    root = ET.Element('project', name=project)
    for elem in subelements:
        root.append(elem)
    ET.ElementTree(root).write(fname)


def delete_storedir(store_dir):
    """
    This method deletes a store dir.
    """
    from ..core import delete_dir

    head, tail = os.path.split(store_dir)
    if tail == '.osc':
        delete_dir(store_dir)
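A brief sketch of the Store accessors (the checkout path is hypothetical): reads return None for missing store files, and writes go through a "<name>.new" temp file followed by os.rename(), so each store file is replaced atomically.

    from osc.obs_scm import Store

    s = Store("home:user/foo")        # requires .osc/_project (and .osc/_package for a package)
    print(s.project, s.package)       # first line of .osc/_project and .osc/_package
    s.size_limit = 1024               # writes "1024\n" to .osc/_size_limit
    print("_size_limit" in s)         # True -- __contains__() delegates to exists()
    print(list(s))                    # filenames inside the .osc store directory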
307
osc/store.py
307
osc/store.py
@ -9,313 +9,8 @@ import os
from xml.etree import ElementTree as ET

from . import oscerr
from ._private import api
from . import git_scm

class Store:
    STORE_DIR = ".osc"
    STORE_VERSION = "1.0"

    @classmethod
    def is_project_dir(cls, path):
        try:
            store = cls(path)
        except oscerr.NoWorkingCopy:
            return False
        return store.is_project

    @classmethod
    def is_package_dir(cls, path):
        try:
            store = cls(path)
        except oscerr.NoWorkingCopy:
            return False
        return store.is_package

    def __init__(self, path, check=True):
        self.path = path
        self.abspath = os.path.abspath(self.path)

        self.is_project = self.exists("_project") and not self.exists("_package")
        self.is_package = self.exists("_project") and self.exists("_package")

        if check and not any([self.is_project, self.is_package]):
            msg = f"Directory '{self.path}' is not an OBS SCM working copy"
            raise oscerr.NoWorkingCopy(msg)

    def __contains__(self, fn):
        return self.exists(fn)

    def __iter__(self):
        path = os.path.join(self.abspath, self.STORE_DIR)
        yield from os.listdir(path)

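A short sketch of the constructor and the two dunder helpers above, with a hypothetical checkout path: is_package_dir() probes a directory without raising, `in` checks for a single store file, and iterating a Store lists everything under .osc/:

path = "./hello-world"  # hypothetical package checkout

if Store.is_package_dir(path):
    store = Store(path)
    print("_apiurl" in store)   # True if .osc/_apiurl exists
    print(sorted(store))        # all file names inside .osc/
else:
    print(f"{path} is not an OBS SCM package working copy")
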
    def assert_is_project(self):
        if not self.is_project:
            msg = f"Directory '{self.path}' is not an OBS SCM working copy of a project"
            raise oscerr.NoWorkingCopy(msg)

    def assert_is_package(self):
        if not self.is_package:
            msg = f"Directory '{self.path}' is not an OBS SCM working copy of a package"
            raise oscerr.NoWorkingCopy(msg)

    def get_path(self, fn, subdir=None):
        # sanitize input to ensure that joining path works as expected
        fn = fn.lstrip("/")
        if subdir:
            subdir = subdir.lstrip("/")
            return os.path.join(self.abspath, self.STORE_DIR, subdir, fn)
        return os.path.join(self.abspath, self.STORE_DIR, fn)

    def exists(self, fn, subdir=None):
        return os.path.exists(self.get_path(fn, subdir=subdir))

    def unlink(self, fn, subdir=None):
        try:
            os.unlink(self.get_path(fn, subdir=subdir))
        except FileNotFoundError:
            pass

    def read_file(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return f.read()

    def write_file(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        try:
            if subdir:
                os.makedirs(self.get_path(subdir))
            else:
                os.makedirs(self.get_path(""))
        except FileExistsError:
            pass

        old = self.get_path(fn, subdir=subdir)
        new = self.get_path(f"{fn}.new", subdir=subdir)
        try:
            with open(new, "w", encoding="utf-8") as f:
                f.write(value)
            os.rename(new, old)
        except:
            if os.path.exists(new):
                os.unlink(new)
            raise

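Like the legacy store_write_string(), write_file() writes to `<fn>.new` and renames it into place, so readers never observe a half-written store file; passing None deletes the file instead. A sketch with a hypothetical file name and checkout:

store = Store("./hello-world")          # hypothetical package checkout
store.write_file("_note", "scratch\n")  # atomically creates .osc/_note (name is made up)
print(store.read_file("_note"))         # -> "scratch\n"
store.write_file("_note", None)         # removes the file again
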
    def read_list(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return [line.rstrip("\n") for line in f]

    def write_list(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if not isinstance(value, (list, tuple)):
            msg = f"The argument `value` should be list, not {type(value).__name__}"
            raise TypeError(msg)
        value = "".join((f"{line or ''}\n" for line in value))
        self.write_file(fn, value, subdir=subdir)

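write_list() serializes one item per line and read_list() strips the trailing newlines back off; this is the format behind the `_to_be_added`-style state files further down. A sketch (the higher-level properties below are the usual way to touch these files):

store = Store("./hello-world")                    # hypothetical checkout
store.write_list("_to_be_added", ["foo", "bar"])  # .osc/_to_be_added gets two lines
print(store.read_list("_to_be_added"))            # -> ['foo', 'bar']
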
    def read_string(self, fn, subdir=None):
        if not self.exists(fn, subdir=subdir):
            return None
        with open(self.get_path(fn, subdir=subdir), encoding="utf-8") as f:
            return f.readline().strip()

    def write_string(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        if not isinstance(value, str):
            msg = f"The argument `value` should be str, not {type(value).__name__}"
            raise TypeError(msg)
        self.write_file(fn, f"{value}\n", subdir=subdir)

    def read_int(self, fn):
        if not self.exists(fn):
            return None
        result = self.read_string(fn)
        if not result.isdigit():
            return None
        return int(result)

    def write_int(self, fn, value, subdir=None):
        if value is None:
            self.unlink(fn, subdir=subdir)
            return
        if not isinstance(value, int):
            msg = f"The argument `value` should be int, not {type(value).__name__}"
            raise TypeError(msg)
        value = str(value)
        self.write_string(fn, value, subdir=subdir)

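The typed helpers build on each other: write_int() goes through write_string(), which appends the trailing newline and delegates to write_file(); read_int() returns None for anything that is not a plain digit string. A sketch with a hypothetical checkout:

store = Store("./hello-world")          # hypothetical checkout
store.write_int("_size_limit", 1024)    # stored as the text "1024\n"
print(store.read_int("_size_limit"))    # -> 1024
print(store.read_int("_missing_file"))  # -> None, the file doesn't exist
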
    def read_xml_node(self, fn, node_name, subdir=None):
        path = self.get_path(fn, subdir=subdir)
        try:
            tree = ET.parse(path)
        except SyntaxError as e:
            msg = f"Unable to parse '{path}': {e}"
            raise oscerr.NoWorkingCopy(msg)
        root = tree.getroot()
        assert root.tag == node_name
        # TODO: return root?
        return tree

    def write_xml_node(self, fn, node_name, node, subdir=None):
        path = self.get_path(fn, subdir=subdir)
        assert node.tag == node_name
        api.write_xml_node_to_file(node, path)

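write_xml_node() asserts the root tag before persisting and read_xml_node() asserts it again when parsing, so a store file cannot silently hold the wrong document type. A sketch with a hypothetical `_files` payload (real entries also carry md5/size/mtime attributes):

from xml.etree import ElementTree as ET

store = Store("./hello-world")                    # hypothetical checkout
root = ET.Element("directory")
ET.SubElement(root, "entry", name="hello.spec")   # illustrative entry only
store.write_xml_node("_files", "directory", root)

tree = store.read_xml_node("_files", "directory")  # returns an ElementTree
print(tree.getroot().tag)                          # -> "directory"
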
    def _sanitize_apiurl(self, value):
        # apiurl shouldn't end with a slash, strip it so we can use apiurl without modifications
        # in config['api_host_options'][apiurl] and other places
        if isinstance(value, str):
            value = value.strip("/")
        elif isinstance(value, bytes):
            value = value.strip(b"/")
        return value

    @property
    def apiurl(self):
        return self._sanitize_apiurl(self.read_string("_apiurl"))

    @apiurl.setter
    def apiurl(self, value):
        self.write_string("_apiurl", self._sanitize_apiurl(value))

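Because both the getter and the setter run through _sanitize_apiurl(), the stored value is normalized no matter which direction it travels. A sketch with a hypothetical checkout:

store = Store("./hello-world")              # hypothetical checkout
store.apiurl = "https://api.opensuse.org/"  # trailing slash is stripped on write
print(store.apiurl)                         # -> "https://api.opensuse.org"
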
    @property
    def project(self):
        return self.read_string("_project")

    @project.setter
    def project(self, value):
        self.write_string("_project", value)

    @property
    def package(self):
        return self.read_string("_package")

    @package.setter
    def package(self, value):
        self.write_string("_package", value)

    @property
    def scmurl(self):
        return self.read_string("_scm")

    @scmurl.setter
    def scmurl(self, value):
        return self.write_string("_scm", value)

    @property
    def size_limit(self):
        return self.read_int("_size_limit")

    @size_limit.setter
    def size_limit(self, value):
        return self.write_int("_size_limit", value)

    @property
    def to_be_added(self):
        self.assert_is_package()
        return self.read_list("_to_be_added") or []

    @to_be_added.setter
    def to_be_added(self, value):
        self.assert_is_package()
        return self.write_list("_to_be_added", value)

    @property
    def to_be_deleted(self):
        self.assert_is_package()
        return self.read_list("_to_be_deleted") or []

    @to_be_deleted.setter
    def to_be_deleted(self, value):
        self.assert_is_package()
        return self.write_list("_to_be_deleted", value)

    @property
    def in_conflict(self):
        self.assert_is_package()
        return self.read_list("_in_conflict") or []

    @in_conflict.setter
    def in_conflict(self, value):
        self.assert_is_package()
        return self.write_list("_in_conflict", value)

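These three properties are the package working-copy state lists; the getters fall back to an empty list and both directions insist on a package (not project) store. A sketch of how a caller might record a scheduled addition, with hypothetical names:

store = Store("./hello-world")   # hypothetical package checkout
store.to_be_added = store.to_be_added + ["new-file.patch"]  # persists .osc/_to_be_added
print(store.to_be_added)         # -> ['new-file.patch']
print(store.in_conflict)         # -> [] when no conflicts are recorded
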
    @property
    def osclib_version(self):
        return self.read_string("_osclib_version")

    @property
    def files(self):
        self.assert_is_package()
        if self.exists("_scm"):
            msg = f"Package '{self.path}' is managed via SCM"
            raise oscerr.NoWorkingCopy(msg)
        if not self.exists("_files"):
            msg = f"Package '{self.path}' doesn't contain _files metadata"
            raise oscerr.NoWorkingCopy(msg)
        result = []
        directory_node = self.read_xml_node("_files", "directory").getroot()
        from . import core as osc_core
        for entry_node in api.find_nodes(directory_node, "directory", "entry"):
            result.append(osc_core.File.from_xml_node(entry_node))
        return result

    @files.setter
    def files(self, value):
        if not isinstance(value, (list, tuple)):
            msg = f"The argument `value` should be list, not {type(value).__name__}"
            raise TypeError(msg)

        root = ET.Element("directory")
        for file_obj in sorted(value):
            file_obj.to_xml_node(root)
        self.write_xml_node("_files", "directory", root)

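The files property round-trips the `_files` XML through the File class: reading yields File objects built with File.from_xml_node(), and assigning a list serializes them back via to_xml_node(). A sketch with hypothetical metadata values, assuming the checkout already has `_files` and is not SCM-managed:

from osc.obs_scm import File, Store

store = Store("./hello-world")   # hypothetical package checkout
# name, md5, size and mtime below are illustrative values only
new_file = File("hello.spec", "d41d8cd98f00b204e9800998ecf8427e", 0, 1700000000)
store.files = store.files + [new_file]

for f in store.files:
    print(f.name, f.size, f.skipped)
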
    @property
    def last_buildroot(self):
        self.assert_is_package()
        items = self.read_list("_last_buildroot")
        if items is None:
            return items

        if len(items) != 3:
            msg = f"Package '{self.path}' contains _last_buildroot metadata that doesn't contain 3 lines: [repo, arch, vm_type]"
            raise oscerr.NoWorkingCopy(msg)

        if items[2] in ("", "None"):
            items[2] = None

        return items

    @last_buildroot.setter
    def last_buildroot(self, value):
        self.assert_is_package()
        if len(value) != 3:
            raise ValueError("A list with exactly 3 items is expected: [repo, arch, vm_type]")
        self.write_list("_last_buildroot", value)

    @property
    def _meta_node(self):
        if not self.exists("_meta"):
            return None
        if self.is_package:
            root = self.read_xml_node("_meta", "package").getroot()
        else:
            root = self.read_xml_node("_meta", "project").getroot()
        return root

from .obs_scm import Store

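With the class itself moved to osc.obs_scm, this re-import keeps the old module path working, so existing callers don't have to change immediately; both import paths should resolve to the same class:

from osc.store import Store as LegacyPathStore  # still works via the re-export above
from osc.obs_scm import Store as NewPathStore   # the new canonical location

assert LegacyPathStore is NewPathStore
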

def get_store(path, check=True, print_warnings=False):