import logging
import json
import hashlib

from collections import namedtuple, OrderedDict
from datetime import datetime
from functools import wraps

import jwt.utils

from peewee import IntegrityError
from flask import make_response, request, url_for
from jwkest.jws import SIGNER_ALGS, keyrep

import features

from app import docker_v2_signing_key, app
from auth.registry_jwt_auth import process_registry_jwt_auth
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid,
                                 NameInvalid)
from endpoints.trackhelper import track_and_log
from endpoints.notificationhelper import spawn_notification
from util.registry.replication import queue_storage_replication
from util.names import VALID_TAG_PATTERN
from digest import digest_tools
from data import model
from data.database import RepositoryTag


logger = logging.getLogger(__name__)


BASE_MANIFEST_ROUTE = '/<repopath:repository>/manifests/<regex("{0}"):manifest_ref>'
MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
MANIFEST_TAGNAME_ROUTE = BASE_MANIFEST_ROUTE.format(VALID_TAG_PATTERN)

# From: https://github.com/docker/distribution/blob/47a064d4195a9b56133891bbb13620c3ac83a827/manifest/schema1/manifest.go#L18
MANIFEST_CONTENT_TYPE = 'application/vnd.docker.distribution.manifest.v1+prettyjws'
MANIFEST2_SCHEMA2_CONTENT_TYPES = ['application/vnd.docker.distribution.manifest.v2+json',
                                   'application/vnd.docker.distribution.manifest.list.v2+json']

ISO_DATETIME_FORMAT_ZULU = '%Y-%m-%dT%H:%M:%SZ'
JWS_ALGORITHM = 'RS256'


ImageMetadata = namedtuple('ImageMetadata', ['digest', 'v1_metadata', 'v1_metadata_str'])
ExtractedV1Metadata = namedtuple('ExtractedV1Metadata', ['docker_id', 'parent', 'created',
                                                         'comment', 'command', 'labels'])

_SIGNATURES_KEY = 'signatures'
_PROTECTED_KEY = 'protected'
_FORMAT_LENGTH_KEY = 'formatLength'
_FORMAT_TAIL_KEY = 'formatTail'
_REPO_NAME_KEY = 'name'
_REPO_TAG_KEY = 'tag'
_FS_LAYERS_KEY = 'fsLayers'
_HISTORY_KEY = 'history'
_BLOB_SUM_KEY = 'blobSum'
_V1_COMPAT_KEY = 'v1Compatibility'
_ARCH_KEY = 'architecture'
_SCHEMA_VER = 'schemaVersion'

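# For reference only: a trimmed, illustrative shape of a schema1 manifest body, using the key
# constants defined above. The digests and IDs below are placeholders, not real values.
#
#   {
#     "schemaVersion": 1,
#     "name": "namespace/repo",
#     "tag": "latest",
#     "architecture": "amd64",
#     "fsLayers": [{"blobSum": "sha256:<digest>"}],
#     "history": [{"v1Compatibility": "{\"id\": \"<docker-id>\", ...}"}],
#     "signatures": [{"header": {"jwk": {...}, "alg": "RS256"},
#                     "signature": "<base64url>",
#                     "protected": "<base64url>"}]
#   }
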
class SignedManifest(object):
  def __init__(self, manifest_bytes, validate=True):
    self._bytes = manifest_bytes

    self._parsed = json.loads(manifest_bytes)
    self._signatures = self._parsed[_SIGNATURES_KEY]
    self._tag = self._parsed[_REPO_TAG_KEY]

    repo_name_tuple = self._parsed[_REPO_NAME_KEY].split('/')
    if len(repo_name_tuple) > 1:
      self._namespace, self._repo_name = repo_name_tuple
    elif len(repo_name_tuple) == 1:
      self._namespace = ''
      self._repo_name = repo_name_tuple[0]
    else:
      raise ValueError('repo_name has too many or too few pieces')

    if validate:
      self._validate()

  def _validate(self):
    for signature in self._signatures:
      bytes_to_verify = '{0}.{1}'.format(signature['protected'],
                                         jwt.utils.base64url_encode(self.payload))
      signer = SIGNER_ALGS[signature['header']['alg']]
      key = keyrep(signature['header']['jwk'])
      gk = key.get_key()
      sig = jwt.utils.base64url_decode(signature['signature'].encode('utf-8'))
      verified = signer.verify(bytes_to_verify, sig, gk)
      if not verified:
        raise ValueError('manifest file failed signature verification')

  @property
  def signatures(self):
    return self._signatures

  @property
  def namespace(self):
    return self._namespace

  @property
  def repo_name(self):
    return self._repo_name

  @property
  def tag(self):
    return self._tag

  @property
  def bytes(self):
    return self._bytes

  @property
  def digest(self):
    return digest_tools.sha256_digest(self.payload)

  @property
  def layers(self):
    """ Returns a generator of objects that have the blobSum and v1Compatibility keys in them,
        starting from the base image and working toward the leaf node.
    """
    for blob_sum_obj, history_obj in reversed(zip(self._parsed[_FS_LAYERS_KEY],
                                                  self._parsed[_HISTORY_KEY])):
      try:
        image_digest = digest_tools.Digest.parse_digest(blob_sum_obj[_BLOB_SUM_KEY])
      except digest_tools.InvalidDigestException:
        err_message = 'could not parse manifest digest: %s' % blob_sum_obj[_BLOB_SUM_KEY]
        raise ManifestInvalid(detail={'message': err_message})

      metadata_string = history_obj[_V1_COMPAT_KEY]

      v1_metadata = json.loads(metadata_string)
      command_list = v1_metadata.get('container_config', {}).get('Cmd', None)
      command = json.dumps(command_list) if command_list else None

      if 'id' not in v1_metadata:
        raise ManifestInvalid(detail={'message': 'invalid manifest v1 history'})

      labels = v1_metadata.get('config', {}).get('Labels', {}) or {}
      extracted = ExtractedV1Metadata(v1_metadata['id'], v1_metadata.get('parent'),
                                      v1_metadata.get('created'), v1_metadata.get('comment'),
                                      command, labels)
      yield ImageMetadata(image_digest, extracted, metadata_string)

  @property
  def payload(self):
    protected = str(self._signatures[0][_PROTECTED_KEY])
    parsed_protected = json.loads(jwt.utils.base64url_decode(protected))
    signed_content_head = self._bytes[:parsed_protected[_FORMAT_LENGTH_KEY]]
    signed_content_tail = jwt.utils.base64url_decode(str(parsed_protected[_FORMAT_TAIL_KEY]))
    return signed_content_head + signed_content_tail

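# A minimal usage sketch (illustrative only; `manifest_bytes` stands in for the raw body of a
# schema1 manifest PUT):
#
#   manifest = SignedManifest(manifest_bytes)
#   assert manifest.digest.startswith('sha256:')
#   for layer in manifest.layers:
#     print layer.digest, layer.v1_metadata.docker_id
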
class SignedManifestBuilder(object):
  """ Class which represents a manifest which is currently being built.
  """
  def __init__(self, namespace_name, repo_name, tag, architecture='amd64', schema_ver=1):
    repo_name_key = '{0}/{1}'.format(namespace_name, repo_name)
    if namespace_name == '':
      repo_name_key = repo_name

    self._base_payload = {
      _REPO_TAG_KEY: tag,
      _REPO_NAME_KEY: repo_name_key,
      _ARCH_KEY: architecture,
      _SCHEMA_VER: schema_ver,
    }

    self._fs_layer_digests = []
    self._history = []

  def add_layer(self, layer_digest, v1_json_metadata):
    self._fs_layer_digests.append({
      _BLOB_SUM_KEY: layer_digest,
    })
    self._history.append({
      _V1_COMPAT_KEY: v1_json_metadata,
    })
    return self

  def build(self, json_web_key):
    """ Build the payload and sign it, returning a SignedManifest object.
    """
    payload = OrderedDict(self._base_payload)
    payload.update({
      _HISTORY_KEY: self._history,
      _FS_LAYERS_KEY: self._fs_layer_digests,
    })

    payload_str = json.dumps(payload, indent=3)

    split_point = payload_str.rfind('\n}')

    protected_payload = {
      'formatTail': jwt.utils.base64url_encode(payload_str[split_point:]),
      'formatLength': split_point,
      'time': datetime.utcnow().strftime(ISO_DATETIME_FORMAT_ZULU),
    }
    protected = jwt.utils.base64url_encode(json.dumps(protected_payload))
    logger.debug('Generated protected block: %s', protected)

    bytes_to_sign = '{0}.{1}'.format(protected, jwt.utils.base64url_encode(payload_str))

    signer = SIGNER_ALGS[JWS_ALGORITHM]
    signature = jwt.utils.base64url_encode(signer.sign(bytes_to_sign, json_web_key.get_key()))
    logger.debug('Generated signature: %s', signature)

    public_members = set(json_web_key.public_members)
    public_key = {comp: value for comp, value in json_web_key.to_dict().items()
                  if comp in public_members}

    signature_block = {
      'header': {
        'jwk': public_key,
        'alg': JWS_ALGORITHM,
      },
      'signature': signature,
      _PROTECTED_KEY: protected,
    }

    logger.debug('Encoded signature block: %s', json.dumps(signature_block))

    payload.update({
      _SIGNATURES_KEY: [signature_block],
    })

    return SignedManifest(json.dumps(payload, indent=3))

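# A rough sketch of the builder flow, mirroring _generate_and_store_manifest below (names are
# illustrative; `signing_key` stands in for the app's docker_v2_signing_key):
#
#   builder = SignedManifestBuilder('mynamespace', 'myrepo', 'latest')
#   builder.add_layer('sha256:<leaf-digest>', leaf_v1_json)
#   builder.add_layer('sha256:<parent-digest>', parent_v1_json)
#   signed = builder.build(signing_key)
#   signed.digest, signed.bytes
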
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read
@anon_protect
def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
  try:
    manifest = model.tag.load_tag_manifest(namespace_name, repo_name, manifest_ref)
  except model.InvalidManifestException:
    try:
      model.tag.get_active_tag(namespace_name, repo_name, manifest_ref)
    except RepositoryTag.DoesNotExist:
      raise ManifestUnknown()

    try:
      manifest = _generate_and_store_manifest(namespace_name, repo_name, manifest_ref)
    except model.DataModelException:
      logger.exception('Exception when generating manifest for %s/%s:%s', namespace_name,
                       repo_name, manifest_ref)
      raise ManifestUnknown()

  repo = model.repository.get_repository(namespace_name, repo_name)
  if repo is not None:
    track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01)

  response = make_response(manifest.json_data, 200)
  response.headers['Content-Type'] = MANIFEST_CONTENT_TYPE
  response.headers['Docker-Content-Digest'] = manifest.digest
  return response


@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['GET'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read
@anon_protect
def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref):
  try:
    manifest = model.tag.load_manifest_by_digest(namespace_name, repo_name, manifest_ref)
  except model.InvalidManifestException:
    # Without a tag name to reference, we can't make an attempt to generate the manifest
    raise ManifestUnknown()

  repo = model.repository.get_repository(namespace_name, repo_name)
  if repo is not None:
    track_and_log('pull_repo', repo)

  response = make_response(manifest.json_data, 200)
  response.headers['Content-Type'] = MANIFEST_CONTENT_TYPE
  response.headers['Docker-Content-Digest'] = manifest.digest
  return response


def _reject_manifest2_schema2(func):
  @wraps(func)
  def wrapped(*args, **kwargs):
    if request.content_type in MANIFEST2_SCHEMA2_CONTENT_TYPES:
      raise ManifestInvalid(detail={'message': 'manifest schema version not supported'},
                            http_status_code=415)
    return func(*args, **kwargs)
  return wrapped


@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['PUT'])
@_reject_manifest2_schema2
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
def write_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
  try:
    manifest = SignedManifest(request.data)
  except ValueError:
    raise ManifestInvalid(detail={'message': 'could not parse manifest'})

  if manifest.tag != manifest_ref:
    raise TagInvalid()

  return _write_manifest(namespace_name, repo_name, manifest)


@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['PUT'])
@_reject_manifest2_schema2
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
def write_manifest_by_digest(namespace_name, repo_name, manifest_ref):
  try:
    manifest = SignedManifest(request.data)
  except ValueError:
    raise ManifestInvalid(detail={'message': 'could not parse manifest'})

  if manifest.digest != manifest_ref:
    raise ManifestInvalid(detail={'message': 'manifest digest mismatch'})

  return _write_manifest(namespace_name, repo_name, manifest)

def _updated_v1_metadata(v1_metadata_json, updated_id_map):
  parsed = json.loads(v1_metadata_json)
  parsed['id'] = updated_id_map[parsed['id']]

  if parsed.get('parent') and parsed['parent'] in updated_id_map:
    parsed['parent'] = updated_id_map[parsed['parent']]

  if parsed.get('container_config', {}).get('Image'):
    existing_image = parsed['container_config']['Image']
    if existing_image in updated_id_map:
      parsed['container_config']['Image'] = updated_id_map[existing_image]

  return json.dumps(parsed)

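# An illustrative example of the rewrite above (hypothetical IDs): given
# updated_id_map = {'old-id': 'new-id', 'old-parent': 'new-parent'} and a v1 blob like
#
#   {"id": "old-id", "parent": "old-parent", "container_config": {"Image": "old-parent"}}
#
# the returned JSON has the id, parent, and container_config Image references all remapped to
# the new, content-derived IDs.
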
def _write_manifest_itself(namespace_name, repo_name, manifest):
  # Ensure that the manifest is for this repository. If the manifest's namespace is empty, then
  # it is for the library namespace and we need an extra check.
  if (manifest.namespace == '' and features.LIBRARY_SUPPORT and
      namespace_name == app.config['LIBRARY_NAMESPACE']):
    # This is a library manifest. All good.
    pass
  elif manifest.namespace != namespace_name:
    raise NameInvalid()

  if manifest.repo_name != repo_name:
    raise NameInvalid()

  # Ensure that the repository exists.
  repo = model.repository.get_repository(namespace_name, repo_name)
  if repo is None:
    raise NameInvalid()

  # Lookup all the images and their parent images (if any) inside the manifest. This will let us
  # know which V1 images we need to synthesize and which ones are invalid.
  layers = list(manifest.layers)

  docker_image_ids = {mdata.v1_metadata.docker_id for mdata in layers}
  parent_image_ids = {mdata.v1_metadata.parent for mdata in layers
                      if mdata.v1_metadata.parent}
  all_image_ids = list(docker_image_ids | parent_image_ids)

  images_query = model.image.lookup_repository_images(repo, all_image_ids)
  images_map = {image.docker_image_id: image for image in images_query}

  # Lookup the storages associated with each blob in the manifest.
  checksums = list({str(mdata.digest) for mdata in manifest.layers})
  storage_query = model.storage.lookup_repo_storages_by_content_checksum(repo, checksums)
  storage_map = {storage.content_checksum: storage for storage in storage_query}

  # Ensure that we have valid V1 docker IDs. If Docker gives us a V1 layer ID pointing to
  # a storage with a content checksum different from the existing one, then we need to rewrite
  # the Docker ID to ensure consistency.
  tag_name = manifest.tag
  has_rewritten_ids = False
  updated_id_map = {}

  # Synthesized image id hash. Can be used to pull a "content addressable" image id out of thin air.
  digest_history = hashlib.sha256()

  for mdata in layers:
    digest_str = str(mdata.digest)
    v1_mdata = mdata.v1_metadata
    working_docker_id = v1_mdata.docker_id

    # Update our digest_history hash for the new layer data.
    digest_history.update(digest_str)
    digest_history.update("@")
    digest_history.update(mdata.v1_metadata_str.encode('utf-8'))
    digest_history.update("|")

    # Ensure that all blobs exist.
    blob_storage = storage_map.get(digest_str)
    if blob_storage is None:
      raise BlobUnknown(detail={'digest': digest_str})

    # Ensure that the V1 image's storage matches the V2 blob. If not, we've found
    # a data inconsistency and need to create a new layer ID for the V1 image, and for all images
    # that follow it in the ancestry chain.
    if ((v1_mdata.docker_id in images_map and
         images_map[v1_mdata.docker_id].storage.content_checksum != digest_str) or
        has_rewritten_ids):
      working_docker_id = digest_history.hexdigest()
      logger.warning('Rewriting docker_id %s/%s %s -> %s', namespace_name, repo_name,
                     v1_mdata.docker_id, working_docker_id)
      has_rewritten_ids = True

    # Store the new docker id in the map.
    updated_id_map[v1_mdata.docker_id] = working_docker_id

    # Lookup the parent image for the layer, if any.
    parent_image = None
    if v1_mdata.parent is not None:
      parent_image = images_map.get(v1_mdata.parent)
      if parent_image is None:
        msg = 'Parent not found with docker image id {0}'.format(v1_mdata.parent)
        raise ManifestInvalid(detail={'message': msg})

    # Synthesize and store the v1 metadata in the db.
    v1_metadata_json = mdata.v1_metadata_str
    if has_rewritten_ids:
      v1_metadata_json = _updated_v1_metadata(mdata.v1_metadata_str, updated_id_map)

    image = model.image.synthesize_v1_image(repo, blob_storage, working_docker_id,
                                            v1_mdata.created, v1_mdata.comment, v1_mdata.command,
                                            v1_metadata_json, parent_image)
    images_map[v1_mdata.docker_id] = image

  if not layers:
    # The manifest doesn't actually reference any layers!
    raise ManifestInvalid(detail={'message': 'manifest does not reference any layers'})

  # Store the manifest pointing to the tag.
  manifest_digest = manifest.digest
  leaf_layer_id = images_map[layers[-1].v1_metadata.docker_id].docker_image_id
  tag_manifest, manifest_created = model.tag.store_tag_manifest(namespace_name, repo_name,
                                                                tag_name, leaf_layer_id,
                                                                manifest_digest, manifest.bytes)
  if manifest_created:
    for key, value in layers[-1].v1_metadata.labels.iteritems():
      model.label.create_manifest_label(tag_manifest, key, value, 'manifest')

  # Queue all blobs in the manifest for replication.
  # TODO(jschorr): Find a way to optimize this insertion.
  if features.STORAGE_REPLICATION:
    for mdata in layers:
      digest_str = str(mdata.digest)
      blob_storage = storage_map.get(digest_str)
      queue_storage_replication(namespace_name, blob_storage)

  return (repo, tag_name, manifest_digest)


def _write_manifest(namespace_name, repo_name, manifest):
  (repo, tag_name, manifest_digest) = _write_manifest_itself(namespace_name, repo_name, manifest)

  # Spawn the repo_push event.
  event_data = {
    'updated_tags': [tag_name],
  }

  track_and_log('push_repo', repo, tag=tag_name)
  spawn_notification(repo, 'repo_push', event_data)

  response = make_response('OK', 202)
  response.headers['Docker-Content-Digest'] = manifest_digest
  response.headers['Location'] = url_for('v2.fetch_manifest_by_digest',
                                         repository='%s/%s' % (namespace_name, repo_name),
                                         manifest_ref=manifest_digest)
  return response


@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
def delete_manifest_by_digest(namespace_name, repo_name, manifest_ref):
  """ Delete the manifest specified by the digest. Note: there is no equivalent
      method for deleting by tag name because it is forbidden by the spec.
  """
  try:
    manifest = model.tag.load_manifest_by_digest(namespace_name, repo_name, manifest_ref)
  except model.InvalidManifestException:
    # Without a tag name to reference, we can't make an attempt to generate the manifest
    raise ManifestUnknown()

  # Mark the tag as no longer alive.
  try:
    model.tag.delete_tag(namespace_name, repo_name, manifest.tag.name)
  except model.DataModelException:
    # Tag is not alive.
    raise ManifestUnknown()

  track_and_log('delete_tag', manifest.tag.repository,
                tag=manifest.tag.name, digest=manifest_ref)

  return make_response('', 202)


def _generate_and_store_manifest(namespace_name, repo_name, tag_name):
  # First look up the tag object and its ancestors.
  image = model.tag.get_tag_image(namespace_name, repo_name, tag_name, include_storage=True)
  parents = model.image.get_parent_images(namespace_name, repo_name, image)

  # If the manifest is being generated under the library namespace, then we make its namespace
  # empty.
  manifest_namespace = namespace_name
  if features.LIBRARY_SUPPORT and namespace_name == app.config['LIBRARY_NAMESPACE']:
    manifest_namespace = ''

  # Create and populate the manifest builder.
  builder = SignedManifestBuilder(manifest_namespace, repo_name, tag_name)

  # Add the leaf layer.
  builder.add_layer(image.storage.content_checksum, image.v1_json_metadata)

  for parent in parents:
    builder.add_layer(parent.storage.content_checksum, parent.v1_json_metadata)

  # Sign the manifest with our signing key.
  manifest = builder.build(docker_v2_signing_key)

  # Write the manifest to the DB. If a manifest already exists, return the one found.
  try:
    return model.tag.associate_generated_tag_manifest(namespace_name, repo_name, tag_name,
                                                      manifest.digest, manifest.bytes)
  except IntegrityError as ie:
    logger.debug('Got integrity error: %s', ie)
    try:
      return model.tag.load_tag_manifest(namespace_name, repo_name, tag_name)
    except model.InvalidManifestException:
      logger.exception('Exception when generating manifest')
      raise model.DataModelException('Could not load or generate manifest')