Handle package names AND origins
This commit is contained in:
parent
5b833fc7da
commit
d54d634253
1 changed file with 31 additions and 32 deletions
|
@ -45,68 +45,66 @@ def _revalidate_packagesite(abi: str, pkgmirror_url: str) -> List[bytes]:
|
|||
return exfile.read().splitlines()
|
||||
|
||||
|
||||
def _load_packages(lines: List[bytes]) -> defaultdict:
|
||||
def _load_packages(lines: List[bytes]) -> dict:
|
||||
"""
|
||||
Load and return the packages from the passed JSON structured lines.
|
||||
"""
|
||||
result = defaultdict(dict)
|
||||
result = dict(names=defaultdict(dict), origins=defaultdict(dict))
|
||||
for line in lines:
|
||||
# print(f'{line}\n')
|
||||
loaded = loads(line)
|
||||
name = loaded['name']
|
||||
origin = loaded['origin']
|
||||
version = loaded['version']
|
||||
result[name][version] = loaded
|
||||
result['names'][name][version] = loaded
|
||||
result['origins'][origin][version] = loaded
|
||||
return result
|
||||
|
||||
|
||||
def _extract_deps(
|
||||
infodict_packages: defaultdict, passed_packages: dict) -> dict:
|
||||
def _extract_deps(loaded_packages: dict, passed_packages: dict) -> dict:
|
||||
'Compile and return the packages to check, including dependencies.'
|
||||
result = dict()
|
||||
dependencies = defaultdict(set)
|
||||
for name, versions in passed_packages.items():
|
||||
for version in versions:
|
||||
dict_version = infodict_packages[name][version]
|
||||
dict_version = loaded_packages[name][version]
|
||||
if 'deps' not in dict_version:
|
||||
continue
|
||||
for depended_pkg, dict_depended_item in \
|
||||
dict_version['deps'].items():
|
||||
if depended_pkg not in result:
|
||||
result[depended_pkg] = set()
|
||||
result[depended_pkg].add(dict_depended_item['version'])
|
||||
if not result:
|
||||
return result
|
||||
dependencies[depended_pkg].add(dict_depended_item['version'])
|
||||
if not dependencies: # No dependencies
|
||||
return dependencies
|
||||
dict_deps = _extract_deps(
|
||||
infodict_packages=infodict_packages, passed_packages=result)
|
||||
loaded_packages=loaded_packages, passed_packages=dependencies)
|
||||
for name, versions in dict_deps.items():
|
||||
if name not in result:
|
||||
result[name] = set()
|
||||
result[name].update(versions)
|
||||
return result
|
||||
dependencies[name].update(versions)
|
||||
return dependencies
|
||||
|
||||
|
||||
def _get_packages_to_check(
        pkgmirror_url: str, abi: str, loaded_packages: dict,
        passed_packages: Set[str],
) -> List[dict]:
    'Compile and return the packages to check.'
    # A single lookup table keyed by both package names and origins, so
    # callers may pass either form.
    unified = dict(**loaded_packages['names'], **loaded_packages['origins'])
    missing = passed_packages - set(unified)
    if missing:
        raise KeyError(f'Packages not found: {missing}')
    # Every known version of each requested package.
    wanted = {key: set(unified[key]) for key in passed_packages}
    # Fold the transitive dependencies into the set of packages to check.
    for dep_key, dep_versions in _extract_deps(
            loaded_packages=unified, passed_packages=wanted).items():
        wanted.setdefault(dep_key, set()).update(dep_versions)
    prefix = '/'.join((pkgmirror_url, abi, 'latest'))
    checks = []
    for key, versions in wanted.items():
        for ver in versions:
            info = unified[key][ver]
            checks.append(dict(
                name_or_origin=key, version=ver,
                url='/'.join((prefix, info['repopath'])),
                pkgsize=info['pkgsize'], sha256=info['sum']))
    return checks
||||
|
@ -127,20 +125,22 @@ def _get_to_revalidate(packages_to_check: List[dict]) -> List[dict]:
|
|||
def _get_to_revalidate(packages_to_check: List[dict]) -> List[dict]:
    """
    Download every package in ``packages_to_check`` and compare its size
    and SHA256 checksum; collect the mismatching entries into a
    new `list`.

    :param packages_to_check: dicts with 'name_or_origin', 'url',
        'pkgsize' and 'sha256' keys (as built by _get_packages_to_check).
    :returns: the package dicts that failed either check.
    """
    # BUG FIX: this was initialized as dict(), but .append() is called
    # on it below and the signature promises List[dict] — any mismatch
    # would have raised AttributeError.
    to_revalidate = []
    validated = []
    for dict_info in packages_to_check:
        name_or_origin = dict_info['name_or_origin']
        url = dict_info['url']
        request = Request(url=url)
        dl_info = _fetch_and_get_info(request=request)
        if dict_info['pkgsize'] != dl_info['size']:
            print(f'Size mismatch: {name_or_origin}')
            to_revalidate.append(dict_info)
            continue
        if dict_info['sha256'] != dl_info['digest']:
            print(f'SHA256 sum mismatch: {name_or_origin}')
            to_revalidate.append(dict_info)
            continue
        validated.append(name_or_origin)
    # One summary line for all validated packages instead of one line
    # per package.
    print('OK: {validated}'.format(validated=' '.join(validated)))
    return to_revalidate
|
||||
|
||||
|
||||
|
@ -181,11 +181,10 @@ def run():
|
|||
abi = _get_abi(jail_root=args.jail_root)
|
||||
_check_pkgmirror_url(url=args.pkgmirror_url)
|
||||
lines = _revalidate_packagesite(abi=abi, pkgmirror_url=args.pkgmirror_url)
|
||||
infodict_packages = _load_packages(lines=lines)
|
||||
loaded_packages = _load_packages(lines=lines)
|
||||
packages_to_check = _get_packages_to_check(
|
||||
pkgmirror_url=args.pkgmirror_url, abi=abi,
|
||||
infodict_packages=infodict_packages,
|
||||
passed_packages=passed_packages)
|
||||
loaded_packages=loaded_packages, passed_packages=passed_packages)
|
||||
to_revalidate = _get_to_revalidate(packages_to_check=packages_to_check)
|
||||
if to_revalidate:
|
||||
if not _revalidate_packages(to_revalidate=to_revalidate):
|
||||
|
|
Loading…
Reference in a new issue