Commit d716c541 authored by Léo-Paul Géneau 👾

networkcache: use multiarch to support arm

Use `gcc -dumpmachine` instead of `platform.machine()` to discriminate
between architectures.
parent 833d0a6b
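For context, a minimal sketch (not part of the commit) of what the two probes report: `platform.machine()` only gives the CPU family, while `gcc -dumpmachine` prints the full multiarch target triplet, which also encodes the OS and ABI the local toolchain builds for.

import platform
import subprocess

# CPU family only, e.g. 'x86_64', 'aarch64' or 'armv7l'
print(platform.machine())

# Full target triplet, e.g. 'x86_64-linux-gnu' or 'arm-linux-gnueabihf';
# on ARM this also distinguishes hard-float from soft-float ABIs
print(subprocess.check_output(
    ('gcc', '-dumpmachine'), universal_newlines=True).rstrip())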
@@ -27,7 +27,6 @@
 #
 ##############################################################################
 
-import ast
 import hashlib
 import json
 import re
@@ -36,7 +35,6 @@ import sys
 
 import prettytable
 
 from slapos.grid import networkcache
-from slapos.grid.distribution import distribution_tuple
 from slapos.cli.config import ConfigCommand
 from slapos.util import str2bytes
@@ -77,8 +75,10 @@ def looks_like_md5(s):
   return re.match('[0-9a-f]{32}', s)
 
 
-def ostuple(info_dict):
-  return (info_dict['machine'],) + ast.literal_eval(info_dict['os'])
+def infotuple(entry):
+  info_dict = networkcache.loadJsonEntry(entry[0])
+  return info_dict['multiarch'], info_dict['os'], entry[1]
 
 
 def do_lookup(logger, cache_dir, cache_url, signature_certificate_list,
               software_url):
@@ -99,12 +99,13 @@ def do_lookup(logger, cache_dir, cache_url, signature_certificate_list,
     logger.info('Object found in cache, but has no binary entries.')
     return 0
 
-  ostable = sorted(ostuple(json.loads(entry[0])) for entry in entries)
-  pt = prettytable.PrettyTable(['machine', 'distribution', 'version', 'id', 'compatible?'])
-  for os in ostable:
-    compatible = 'yes' if networkcache.is_compatible(os[0], os[1:]) else 'no'
-    pt.add_row([os[0], os[1], os[2], os[3], compatible])
+  pt = prettytable.PrettyTable(['multiarch', 'distribution', 'version', 'id', 'compatible?'])
+  machine_info = networkcache.machine_info_tuple()
+  for multiarch, os, verified in sorted(map(infotuple, entries)):
+    row = [multiarch] + os
+    row.append('yes' if networkcache.is_compatible(machine_info, (multiarch, os)) else 'no')
+    pt.add_row(row)
 
   meta = json.loads(entries[0][0])
   logger.info('Software URL: %s', meta['software_url'])
...
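A rough sketch of how the reworked lookup loop above builds one table row; the entry values here are invented stand-ins for what `networkcache.loadJsonEntry()` would return.

import prettytable

pt = prettytable.PrettyTable(
    ['multiarch', 'distribution', 'version', 'id', 'compatible?'])

# hypothetical decoded cache entry
multiarch = 'aarch64-linux-gnu'
os = ['Debian GNU/Linux', '11', 'bullseye']
compatible = False  # what networkcache.is_compatible() would decide locally

row = [multiarch] + os
row.append('yes' if compatible else 'no')
pt.add_row(row)
print(pt)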
@@ -17,6 +17,7 @@ import ast
 import json
 import platform
 import shutil
+import subprocess
 import traceback
 
 from slapos.grid.distribution import os_matches, distribution_tuple
@@ -53,8 +54,26 @@ def fallback_call(function):
   return wrapper
 
 
-def is_compatible(machine, os):
-  return machine == platform.machine() and os_matches(os, distribution_tuple())
+def multiarch():
+  return subprocess.check_output(
+    ('gcc', '-dumpmachine'), universal_newlines=True,).rstrip()
+
+
+def machine_info_tuple():
+  return multiarch(), distribution_tuple()
+
+
+def is_compatible(machine_info_tuple, required_info_tuple):
+  return machine_info_tuple[0] == required_info_tuple[0] \
+    and os_matches(required_info_tuple[1], machine_info_tuple[1])
+
+
+def loadJsonEntry(jentry):
+  entry = json.loads(jentry)
+  if 'multiarch' not in entry and entry['machine'] == 'x86_64': # BBB
+    entry['multiarch'] = 'x86_64-linux-gnu'
+    entry['os'] = list(ast.literal_eval(entry['os']))
+  return entry
 
 
 def download_entry_list(cache_url, dir_url, key, logger,
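To illustrate the BBB branch in loadJsonEntry() above: old cache entries stored a bare `machine` field and a repr()'d `os` tuple, and are normalised to the new shape on load. A small sketch with invented values:

import ast
import json

old_jentry = json.dumps({
    'machine': 'x86_64',
    'os': "('Ubuntu', '18.04', 'bionic')",  # old entries stored str(distribution_tuple())
    'sha512': 'dummy',
})

entry = json.loads(old_jentry)
if 'multiarch' not in entry and entry['machine'] == 'x86_64':  # BBB
    entry['multiarch'] = 'x86_64-linux-gnu'
    entry['os'] = list(ast.literal_eval(entry['os']))

print(entry['multiarch'], entry['os'])
# -> x86_64-linux-gnu ['Ubuntu', '18.04', 'bionic']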
@@ -92,22 +111,15 @@ def download_network_cached(cache_url, dir_url, software_url, software_root,
   logger.info('Downloading %s binary from network cache.' % software_url)
   try:
     file_descriptor = None
-    json_entry_list = nc.select_generic(key)
-    for entry in json_entry_list:
-      json_information, _ = entry
+    machine_info = machine_info_tuple()
+    for entry, _ in nc.select_generic(key):
       try:
-        tags = json.loads(json_information)
-        if not is_compatible(tags.get('machine'), ast.literal_eval(tags.get('os'))):
-          continue
-        if tags.get('software_url') != software_url:
-          continue
-        if tags.get('software_root') != software_root:
-          continue
-        sha512 = tags.get('sha512')
-        file_descriptor = nc.download(sha512)
-        break
+        tags = loadJsonEntry(entry)
+        if is_compatible(machine_info, (tags['multiarch'], tags['os'])):
+          file_descriptor = nc.download(tags['sha512'])
+          break
       except Exception:
-        continue
+        pass
     if file_descriptor is not None:
       f = open(path, 'w+b')
       try:
@@ -143,8 +155,8 @@ def upload_network_cached(software_root, software_url, cached_key,
      urlmd5="urlmd5",
      software_url=software_url,
      software_root=software_root,
-     machine=platform.machine(),
-     os=str(distribution_tuple())
+     multiarch=multiarch(),
+     os=distribution_tuple(),
   )
 
   f = open(path, 'r')
...
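The compatibility test is now a comparison of two (multiarch, os) pairs: the triplets must match exactly and the OS tuples must satisfy os_matches(). A sketch of the call shape, with os_matches() stubbed out and values invented for the example:

def os_matches(required_os, local_os):
    # stand-in for slapos.grid.distribution.os_matches
    return tuple(required_os) == tuple(local_os)

def is_compatible(machine_info_tuple, required_info_tuple):
    return machine_info_tuple[0] == required_info_tuple[0] \
        and os_matches(required_info_tuple[1], machine_info_tuple[1])

local = ('aarch64-linux-gnu', ('Debian GNU/Linux', '11', 'bullseye'))
wanted = ('x86_64-linux-gnu', ['Debian GNU/Linux', '11', 'bullseye'])
print(is_compatible(local, wanted))  # False: the triplets differ, so this binary is skipped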
@@ -103,13 +103,13 @@ class TestCliCache(CliMixin):
     self.logger.info.assert_any_call('Software URL: %s',
       u'https://lab.nexedi.com/nexedi/slapos/raw/1.0.102/software/slaprunner/software.cfg')
     self.logger.info.assert_any_call('MD5: %s', 'cccdc51a07e8c575c880f2d70dd4d458')
-    self.logger.info.assert_any_call(u'--------------------------------------------------')
-    self.logger.info.assert_any_call(u' machine distribution version id compatible? ')
-    self.logger.info.assert_any_call(u'--------------------------------------------------')
-    self.logger.info.assert_any_call(u' x86_64 CentOS Linux 7.5.1804 Core no ')
-    self.logger.info.assert_any_call(u' x86_64 Ubuntu 18.04 bionic no ')
+    self.logger.info.assert_any_call(u'-----------------------------------------------------------')
+    self.logger.info.assert_any_call(u' multiarch distribution version id compatible? ')
+    self.logger.info.assert_any_call(u'-----------------------------------------------------------')
+    self.logger.info.assert_any_call(u' x86_64-linux-gnu CentOS Linux 7.5.1804 Core no ')
+    self.logger.info.assert_any_call(u' x86_64-linux-gnu Ubuntu 18.04 bionic no ')
     # Omit some lines as it may fail depending of the OS
-    self.logger.info.assert_any_call(u'--------------------------------------------------')
+    self.logger.info.assert_any_call(u'-----------------------------------------------------------')
 
   def test_uncached_binary(self):
     self.assertEqual(10, cache_do_lookup(
...
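The wider separator and header strings in the updated assertions follow from the longer multiarch values; prettytable pads each column to its widest cell, so the whole table grows. A quick illustrative check (rows are made up and do not reproduce the CLI's exact formatting):

import prettytable

for arch in ('x86_64', 'x86_64-linux-gnu'):
    pt = prettytable.PrettyTable(
        ['multiarch', 'distribution', 'version', 'id', 'compatible?'])
    pt.add_row([arch, 'Ubuntu', '18.04', 'bionic', 'no'])
    print(len(pt.get_string().splitlines()[0]))  # total width grows with the longest cell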