Improve handling of layer transfer errors

To improve the reliability of registry->registry layer transfers the
following changes have been made:
- raise an IOError to trigger a retry when the calculated digest does
  not match the expected digest
- any error when writing the blob file will remove the file

This change also ensures the tripleo-container-image-prepare script writes
any exception to the log file; it appears that errors are currently not
being logged and the ansible task does not print the script's output.

Change-Id: I98f25932a66bb7a9f04cdc1c906223e457fe44b1
Closes-Bug: #1815576
This commit is contained in:
Steve Baker 2019-03-08 11:50:06 +13:00
parent 957c9bd514
commit 7a5b2903e6
4 changed files with 66 additions and 25 deletions

View File

@@ -126,8 +126,11 @@ if __name__ == '__main__':
with open(args.environment_file) as f:
env = yaml.safe_load(f)
params = kolla_builder.container_images_prepare_multi(
env, roles_data, cleanup=args.cleanup, dry_run=args.dry_run)
result = yaml.safe_dump(params, default_flow_style=False)
log.info(result)
print(result)
try:
    # Build the per-role container image parameters and emit them both
    # to the log and to stdout (the ansible task consumes stdout).
    params = kolla_builder.container_images_prepare_multi(
        env, roles_data, cleanup=args.cleanup, dry_run=args.dry_run)
    result = yaml.safe_dump(params, default_flow_style=False)
    log.info(result)
    print(result)
except Exception as e:
    # log.exception() appends the full traceback to the record; the
    # exception text goes into the message via lazy %-formatting.
    # (Passing `e` as a bare extra argument would be treated as a
    # %-format arg for a message with no placeholders, making the
    # logging call itself fail.)
    log.exception('Image prepare failed: %s', e)
    # Re-raise so the script exits non-zero and the caller (e.g. the
    # ansible task) sees the failure instead of a silent success.
    raise

View File

@@ -52,7 +52,7 @@ def image_tag_from_url(image_url):
return image, tag
def export_stream(target_url, layer, layer_stream):
def export_stream(target_url, layer, layer_stream, verify_digest=True):
image, tag = image_tag_from_url(target_url)
digest = layer['digest']
blob_dir_path = os.path.join(IMAGE_EXPORT_DIR, 'v2', image, 'blobs')
@@ -63,21 +63,37 @@ def export_stream(target_url, layer, layer_stream):
length = 0
calc_digest = hashlib.sha256()
with open(blob_path, 'w+b') as f:
for chunk in layer_stream:
if not chunk:
break
f.write(chunk)
calc_digest.update(chunk)
length += len(chunk)
try:
with open(blob_path, 'w+b') as f:
for chunk in layer_stream:
if not chunk:
break
f.write(chunk)
calc_digest.update(chunk)
length += len(chunk)
layer_digest = 'sha256:%s' % calc_digest.hexdigest()
LOG.debug('Calculated layer digest: %s' % layer_digest)
layer_digest = 'sha256:%s' % calc_digest.hexdigest()
LOG.debug('Calculated layer digest: %s' % layer_digest)
# if the original layer is uncompressed the digest may change on export
expected_blob_path = os.path.join(blob_dir_path, '%s.gz' % layer_digest)
if blob_path != expected_blob_path:
os.rename(blob_path, expected_blob_path)
if verify_digest:
if digest != layer_digest:
raise IOError('Expected digest %s '
'does not match calculated %s' %
(digest, layer_digest))
else:
# if the original layer is uncompressed
# the digest may change on export
expected_blob_path = os.path.join(
blob_dir_path, '%s.gz' % layer_digest)
if blob_path != expected_blob_path:
os.rename(blob_path, expected_blob_path)
except Exception as e:
LOG.error('Error while writing blob %s' % blob_path)
# cleanup blob file
if os.path.isfile(blob_path):
os.remove(blob_path)
raise e
layer['digest'] = layer_digest
layer['size'] = length

View File

@@ -932,8 +932,7 @@ class PythonImageUploader(BaseImageUploader):
manifest_headers = {'Accept': MEDIA_MANIFEST_V2}
r = session.get(url, headers=manifest_headers, timeout=30)
if r.status_code in (403, 404):
raise ImageNotFoundException('Not found image: %s' %
url.geturl())
raise ImageNotFoundException('Not found image: %s' % url)
r.raise_for_status()
return r.text
@@ -1288,11 +1287,12 @@ class PythonImageUploader(BaseImageUploader):
calc_digest = hashlib.sha256()
layer_stream = cls._layer_stream_local(layer_id, calc_digest)
return cls._copy_stream_to_registry(target_url, layer, calc_digest,
layer_stream, session)
layer_stream, session,
verify_digest=False)
@classmethod
def _copy_stream_to_registry(cls, target_url, layer, calc_digest,
layer_stream, session):
layer_stream, session, verify_digest=True):
layer['mediaType'] = MEDIA_BLOB_COMPRESSED
length = 0
upload_resp = None
@@ -1300,7 +1300,7 @@ class PythonImageUploader(BaseImageUploader):
export = target_url.netloc in cls.export_registries
if export:
return image_export.export_stream(
target_url, layer, layer_stream)
target_url, layer, layer_stream, verify_digest=verify_digest)
for chunk in layer_stream:
if not chunk:

View File

@@ -89,7 +89,7 @@ class TestImageExport(base.TestCase):
calc_digest = hashlib.sha256()
layer_stream = io.BytesIO(blob_compressed)
layer_digest = image_export.export_stream(
target_url, layer, layer_stream
target_url, layer, layer_stream, verify_digest=False
)
self.assertEqual(compressed_digest, layer_digest)
self.assertEqual(compressed_digest, layer['digest'])
@@ -104,6 +104,28 @@ class TestImageExport(base.TestCase):
with open(blob_path, 'rb') as f:
self.assertEqual(blob_compressed, f.read())
def test_export_stream_verify_failed(self):
    """export_stream with verify_digest=True must raise IOError on a
    digest mismatch and remove the partially written blob file.

    The expected layer digest deliberately does not match the digest
    of the streamed content, so verification fails.
    """
    blob_data = six.b('The Blob')
    blob_compressed = zlib.compress(blob_data)
    target_url = urlparse('docker://localhost:8787/t/nova-api:latest')
    # A digest that cannot match anything the stream produces.
    # (The original test also built a hashlib.sha256() of the stream
    # twice; both values were never used, so that dead code is gone.)
    layer = {
        'digest': 'sha256:somethingelse'
    }
    layer_stream = io.BytesIO(blob_compressed)
    self.assertRaises(IOError, image_export.export_stream,
                      target_url, layer, layer_stream,
                      verify_digest=True)
    blob_dir = os.path.join(image_export.IMAGE_EXPORT_DIR,
                            'v2/t/nova-api/blobs')
    blob_path = os.path.join(blob_dir, 'sha256:somethingelse.gz')
    # The blob directory survives, but the bad blob was cleaned up.
    self.assertTrue(os.path.isdir(blob_dir))
    self.assertFalse(os.path.isfile(blob_path))
def test_cross_repo_mount(self):
target_url = urlparse('docker://localhost:8787/t/nova-api:latest')
other_url = urlparse('docker://localhost:8787/t/nova-compute:latest')