Move expanded_repos into staging api
and make pkglistgen and repo_checker use it. We will have more tools requiring it in the future :)
commit 4f275a5af8
parent 94e2127dd8
@@ -1826,3 +1826,16 @@ class StagingAPI(object):
             return meta.find(xpath) is not None
 
         return False
+
+    # recursively detect underlying projects
+    def expand_project_repo(self, project, repo, repos):
+        repos.append([project, repo])
+        url = self.makeurl(['source', project, '_meta'])
+        meta = ET.parse(self.retried_GET(url)).getroot()
+        for path in meta.findall('.//repository[@name="{}"]/path'.format(repo)):
+            self.expand_project_repo(path.get('project', project), path.get('repository'), repos)
+        return repos
+
+    def expanded_repos(self, repo):
+        return self.expand_project_repo(self.project, repo, [])
+
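For context, a minimal usage sketch of the new helper (not part of the commit). It assumes the usual osclib import path; the apiurl and project below are placeholder values, and a real caller also needs whatever osc configuration StagingAPI normally expects.

    from osclib.stagingapi import StagingAPI

    # Placeholder values: any OBS apiurl and project with a 'standard' repository.
    api = StagingAPI('https://api.opensuse.org', 'openSUSE:Factory')

    # expanded_repos() returns [project, repo] pairs: the project itself first,
    # then every project reached through the repository's <path> entries.
    for project, repo in api.expanded_repos('standard'):
        print('{}/{}'.format(project, repo))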
@@ -526,16 +526,8 @@ class PkgListGen(ToolBase.ToolBase):
         for e in excludes:
             g.ignore(self.groups[e])
 
-    def expand_project_repo(self, project, repo, repos):
-        repos.append([project, repo])
-        url = makeurl(self.apiurl, ['source', project, '_meta'])
-        meta = ET.parse(http_GET(url)).getroot()
-        for path in meta.findall('.//repository[@name="{}"]/path'.format(repo)):
-            self.expand_project_repo(path.get('project', project), path.get('repository'), repos)
-        return repos
-
     def expand_repos(self, project, repo):
-        return self.expand_project_repo(project, repo, [])
+        return StagingAPI(self.apiurl, project).expanded_repos('standard')
 
     def _check_supplements(self):
         tocheck = set()
@@ -225,17 +225,6 @@ class RepoChecker(ReviewBot.ReviewBot):
         self.logger.debug('requests: {} skipped, {} queued'.format(
             count_before - len(self.requests), len(self.requests)))
 
-    def expand_project_repo(self, api, project, repo, repos):
-        repos.append([project, repo])
-        url = api.makeurl(['source', project, '_meta'])
-        meta = ET.parse(api.retried_GET(url)).getroot()
-        for path in meta.findall('.//repository[@name="{}"]/path'.format(repo)):
-            self.expand_project_repo(api, path.get('project', project), path.get('repository'), repos)
-        return repos
-
-    def expand_repos(self, api, project, repo):
-        return self.expand_project_repo(api, project, repo, [])
-
     def ensure_group(self, request, action):
         project = action.tgt_project
         group = self.requests_map[int(request.reqid)]
@@ -265,12 +254,12 @@ class RepoChecker(ReviewBot.ReviewBot):
                 continue
 
             # Only bother if staging can match arch, but layered first.
-            repos = self.expand_repos(self.staging_api(project), project, 'standard')
+            repos = self.staging_api(project).expanded_repos('standard')
             for layered_project, repo in reversed(repos):
                 if repo != 'standard':
                     raise "We assume all is standard"
                 directories.insert(0, self.mirror(layered_project, arch))
 
             whitelist = self.binary_whitelist(project, arch, group)
 
             # Perform checks on group.