Implement file caching

Prevents having to download the same files multiple times.
This commit is contained in:
Nico Krapp 2022-11-02 16:58:40 +01:00
parent 74f5cd901e
commit 7678967ae0
No known key found for this signature in database
GPG Key ID: AC35CFFF55212BC7

View File

@ -1,9 +1,11 @@
import errno
import logging
import shutil
import time
import urllib.parse
import xml.etree.ElementTree as ET
from urllib.error import HTTPError
from pathlib import Path

import osc.core
@ -160,8 +162,13 @@ class OBS:
dirpath: str, dirpath: str,
file_md5: str, file_md5: str,
) -> None: ) -> None:
cached_file = self._path_from_md5(name, dirpath, file_md5)
if not self.in_cache(name, dirpath, file_md5):
with (dirpath / name).open("wb") as f: with (dirpath / name).open("wb") as f:
f.write(self._download(project, package, name, revision).read()) f.write(self._download(project, package, name, revision).read())
shutil.copy(dirpath / name, cached_file)
else:
shutil.copy(cached_file, dirpath / name)
def list(self, project, package, srcmd5, linkrev): def list(self, project, package, srcmd5, linkrev):
params = {"rev": srcmd5, "expand": "1"} params = {"rev": srcmd5, "expand": "1"}
@ -179,3 +186,16 @@ class OBS:
raise e raise e
return root return root
def _path_from_md5(self, name, dirpath, md5):
cache = dirpath.joinpath(".cache/")
if not Path(cache).exists():
cache.mkdir()
filepath = cache.joinpath(f"{md5[0:3]}/{md5[3:6]}/{md5[6:9]}/")
filepath.mkdir(parents=True, exist_ok=True)
return filepath.joinpath(f"{md5[9:]}-{name}")
def in_cache(self, name, dirpath, md5):
    """Return True if a cached copy of *name* with digest *md5* exists.

    :param name: file name being looked up
    :param dirpath: working directory (a :class:`pathlib.Path`)
    :param md5: hex md5 digest of the file's content
    :return: bool — whether the cache entry is a regular file
    """
    # is_file() already yields a bool; no need for an if/return-True dance.
    return self._path_from_md5(name, dirpath, md5).is_file()