Merge branch 'master' into create_data_interface_for_subsystem_api/tag_632

commit fdb63632b0

106 changed files with 2115 additions and 1992 deletions
.gitignore (vendored): 1 change
@@ -11,7 +11,6 @@ static/fonts
 static/build
 stack_local
 test/data/registry/
-typings
 GIT_HEAD
 .idea
 .python-version
@@ -13,6 +13,7 @@ local stages_list = [
   'docker_release',
   'teardown',
 ];

 local stages = utils.set(stages_list);

 // List CI jobs
@@ -11,13 +11,12 @@ container-base-build:
   script:
     - docker build --cache-from quay.io/quay/quay-base:latest -t quay.io/quay/quay-base:latest -f quay-base.dockerfile .
     - docker push quay.io/quay/quay-base:latest
-  services:
-    - docker:dind
   stage: docker_base
   tags:
-    - docker
+    - kubernetes
   variables:
-    DOCKER_DRIVER: aufs
+    DOCKER_DRIVER: overlay
+    DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375
 container-build:
   before_script:
     - docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io
@@ -25,13 +24,12 @@ container-build:
   script:
     - docker build -t quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} -f quay.dockerfile .
    - docker push quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}
-  services:
-    - docker:dind
   stage: docker_build
   tags:
-    - docker
+    - kubernetes
   variables:
-    DOCKER_DRIVER: aufs
+    DOCKER_DRIVER: overlay
+    DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375
 container-release:
   before_script:
     - docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io
@@ -43,13 +41,12 @@ container-release:
     - docker pull quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}
     - docker tag quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}-${CI_COMMIT_SHA}
     - docker push quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}-${CI_COMMIT_SHA}
-  services:
-    - docker:dind
   stage: docker_release
   tags:
-    - docker
+    - kubernetes
   variables:
-    DOCKER_DRIVER: aufs
+    DOCKER_DRIVER: overlay
+    DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375
 karma-tests:
   before_script:
     - cd /
@@ -3,17 +3,17 @@ function(vars={})
   dockerBuild: {
     // base job to manage containers (build / push)
     variables: {
-      DOCKER_DRIVER: "aufs",
+      DOCKER_DRIVER: "overlay",
+      DOCKER_HOST: "tcp://docker-host.gitlab-runner.svc.cluster.local:2375"
     },

     image: "docker:git",
     before_script: [
       "docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io",
     ],
-    services: [
-      "docker:dind",
-    ],
     tags: [
-      "docker",
+      "kubernetes",
     ],
   },

@@ -89,7 +89,6 @@ RUN ln -s /usr/bin/nodejs /usr/bin/node
 ADD package.json package.json
 ADD tsconfig.json tsconfig.json
 ADD webpack.config.js webpack.config.js
-ADD typings.json typings.json
 ADD yarn.lock yarn.lock
 RUN yarn install --ignore-engines

@@ -3,6 +3,6 @@
 echo 'Starting security scanner worker'

 cd /
-venv/bin/python -m workers.securityworker 2>&1
+venv/bin/python -m workers.securityworker.securityworker 2>&1

 echo 'Security scanner worker exited'
@@ -62,7 +62,7 @@ def create_manifest_label(tag_manifest, key, value, source_type_name, media_type

   media_type_id = _get_media_type_id(media_type_name)
   if media_type_id is None:
-    raise InvalidMediaTypeException
+    raise InvalidMediaTypeException()

   source_type_id = _get_label_source_type_id(source_type_name)

@@ -139,8 +139,9 @@ def reset_notification_number_of_failures(namespace_name, repository_name, uuid)
         notification.repository.name != repository_name):
       raise InvalidNotificationException('No repository notification found with uuid: %s' % uuid)
     reset_number_of_failures_to_zero(notification.id)
+    return notification
   except RepositoryNotification.DoesNotExist:
-    pass
+    return None


 def reset_number_of_failures_to_zero(notification_id):
@@ -10,6 +10,7 @@ from endpoints.exception import NotFound
 from data import model

 from digest import digest_tools
+from util.validation import VALID_LABEL_KEY_REGEX

 BASE_MANIFEST_ROUTE = '/v1/repository/<apirepopath:repository>/manifest/<regex("{0}"):manifestref>'
 MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
@@ -92,9 +93,17 @@ class RepositoryManifestLabels(RepositoryParamResource):
     if label_validator.has_reserved_prefix(label_data['key']):
       abort(400, message='Label has a reserved prefix')

-    label = model.label.create_manifest_label(tag_manifest, label_data['key'],
-                                              label_data['value'], 'api',
-                                              media_type_name=label_data['media_type'])
+    label = None
+    try:
+      label = model.label.create_manifest_label(tag_manifest, label_data['key'],
+                                                label_data['value'], 'api',
+                                                media_type_name=label_data['media_type'])
+    except model.InvalidLabelKeyException:
+      abort(400, message='Label is of an invalid format or missing please use %s format for labels'.format(
+        VALID_LABEL_KEY_REGEX))
+    except model.InvalidMediaTypeException:
+      abort(400, message='Media type is invalid please use a valid media type of text/plain or application/json')

     metadata = {
       'id': label.uuid,
       'key': label_data['key'],
@@ -161,10 +161,12 @@ class RepositoryNotification(RepositoryParamResource):
   @disallow_for_app_repositories
   def post(self, namespace, repository, uuid):
     """ Resets repository notification to 0 failures. """
-    model.notification.reset_notification_number_of_failures(namespace, repository, uuid)
-    log_action('reset_repo_notification', namespace,
-               {'repo': repository, 'namespace': namespace, 'notification_id': uuid},
-               repo=model.repository.get_repository(namespace, repository))
+    reset = model.notification.reset_notification_number_of_failures(namespace, repository, uuid)
+    if reset is not None:
+      log_action('reset_repo_notification', namespace,
+                 {'repo': repository, 'namespace': namespace, 'notification_id': uuid,
+                  'event': reset.event.name, 'method': reset.method.name},
+                 repo=model.repository.get_repository(namespace, repository))

     return 'No Content', 204

@@ -1,58 +1,10 @@
-import datetime
-import json
-
-from contextlib import contextmanager
-from data import model
+from endpoints.test.shared import conduct_call
+
 from endpoints.api import api

-CSRF_TOKEN_KEY = '_csrf_token'
-CSRF_TOKEN = '123csrfforme'
-
-
-@contextmanager
-def client_with_identity(auth_username, client):
-  with client.session_transaction() as sess:
-    if auth_username and auth_username is not None:
-      loaded = model.user.get_user(auth_username)
-      sess['user_id'] = loaded.uuid
-      sess['login_time'] = datetime.datetime.now()
-      sess[CSRF_TOKEN_KEY] = CSRF_TOKEN
-    else:
-      sess['user_id'] = 'anonymous'
-
-  yield client
-
-  with client.session_transaction() as sess:
-    sess['user_id'] = None
-    sess['login_time'] = None
-    sess[CSRF_TOKEN_KEY] = None
-
-
-def add_csrf_param(params):
-  """ Returns a params dict with the CSRF parameter added. """
-  params = params or {}
-  params[CSRF_TOKEN_KEY] = CSRF_TOKEN
-  return params
-
-
 def conduct_api_call(client, resource, method, params, body=None, expected_code=200):
   """ Conducts an API call to the given resource via the given client, and ensures its returned
       status matches the code given.

       Returns the response.
   """
-  params = add_csrf_param(params)
-
-  final_url = api.url_for(resource, **params)
-
-  headers = {}
-  headers.update({"Content-Type": "application/json"})
-
-  if body is not None:
-    body = json.dumps(body)
-
-  rv = client.open(final_url, method=method, data=body, headers=headers)
-  msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code,
-                                             rv.data)
-  assert rv.status_code == expected_code, msg
-  return rv
+  return conduct_call(client, resource, api.url_for, method, params, body, expected_code)
@@ -16,7 +16,8 @@ from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerS
                                    BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
                                    TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources,
                                    BuildTriggerSourceNamespaces)
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
 from test.fixtures import *

 BUILD_ARGS = {'build_uuid': '1234'}
@@ -2,8 +2,9 @@ import pytest

 from data import model
 from endpoints.api import api
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
 from endpoints.api.organization import Organization
+from endpoints.test.shared import client_with_identity
 from test.fixtures import *

 @pytest.mark.parametrize('expiration, expected_code', [
@@ -2,8 +2,9 @@ import pytest

 from mock import patch, ANY, MagicMock

-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
 from endpoints.api.repository import RepositoryTrust, Repository
+from endpoints.test.shared import client_with_identity
 from features import FeatureNameValue

 from test.fixtures import *
@@ -52,8 +53,8 @@ def test_signing_disabled(client):
     params = {'repository': 'devtable/simple'}
     response = conduct_api_call(cl, Repository, 'GET', params).json
     assert not response['trust_enabled']


 def test_sni_support():
   import ssl
   assert ssl.HAS_SNI
@@ -4,7 +4,8 @@ from playhouse.test_utils import assert_query_count

 from data.model import _basequery
 from endpoints.api.search import ConductRepositorySearch, ConductSearch
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
 from test.fixtures import *

 @pytest.mark.parametrize('query, expected_query_count', [
@@ -4,12 +4,13 @@ from flask_principal import AnonymousIdentity
 from endpoints.api import api
 from endpoints.api.repositorynotification import RepositoryNotification
 from endpoints.api.team import OrganizationTeamSyncing
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
 from endpoints.api.repository import RepositoryTrust
 from endpoints.api.signing import RepositorySignatures
 from endpoints.api.search import ConductRepositorySearch
 from endpoints.api.superuser import SuperUserRepositoryBuildLogs, SuperUserRepositoryBuildResource
 from endpoints.api.superuser import SuperUserRepositoryBuildStatus
+from endpoints.test.shared import client_with_identity

 from test.fixtures import *

@@ -3,8 +3,9 @@ import pytest
 from collections import Counter
 from mock import patch

-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
 from endpoints.api.signing import RepositorySignatures
+from endpoints.test.shared import client_with_identity

 from test.fixtures import *

@@ -14,21 +15,21 @@ VALID_TARGETS_MAP = {
     "latest": {
       "hashes": {
         "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
       },
       "length": 2111
     }
   },
   "expiration": "2020-05-22T10:26:46.618176424-04:00"
 },
 "targets": {
   "targets": {
     "latest": {
       "hashes": {
         "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
       },
       "length": 2111
     }
   },
   "expiration": "2020-05-22T10:26:01.953414888-04:00"}
 }

@@ -4,9 +4,12 @@ import pytest

 from mock import patch, Mock, MagicMock, call


 from endpoints.api.tag_models_interface import RepositoryTagHistory, Tag
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
 from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags

 from features import FeatureNameValue

 from test.fixtures import *
@@ -4,9 +4,11 @@ from mock import patch

 from data import model
 from endpoints.api import api
-from endpoints.api.test.shared import client_with_identity, conduct_api_call
+from endpoints.api.test.shared import conduct_api_call
 from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList
 from endpoints.api.organization import Organization
+from endpoints.test.shared import client_with_identity

 from test.test_ldap import mock_ldap

 from test.fixtures import *
@@ -5,7 +5,7 @@ from flask import url_for

 from data import model
 from endpoints.appr.registry import appr_bp, blobs
-from endpoints.api.test.shared import client_with_identity
+from endpoints.test.shared import client_with_identity
 from test.fixtures import *

 BLOB_ARGS = {'digest': 'abcd1235'}
endpoints/test/__init__.py (new file, 0 lines)
endpoints/test/shared.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+import datetime
+import json
+import base64
+
+from contextlib import contextmanager
+from data import model
+
+from flask import g
+from flask_principal import Identity
+
+CSRF_TOKEN_KEY = '_csrf_token'
+CSRF_TOKEN = '123csrfforme'
+
+@contextmanager
+def client_with_identity(auth_username, client):
+  with client.session_transaction() as sess:
+    if auth_username and auth_username is not None:
+      loaded = model.user.get_user(auth_username)
+      sess['user_id'] = loaded.uuid
+      sess['login_time'] = datetime.datetime.now()
+      sess[CSRF_TOKEN_KEY] = CSRF_TOKEN
+    else:
+      sess['user_id'] = 'anonymous'
+
+  yield client
+
+  with client.session_transaction() as sess:
+    sess['user_id'] = None
+    sess['login_time'] = None
+    sess[CSRF_TOKEN_KEY] = None
+
+
+def add_csrf_param(params):
+  """ Returns a params dict with the CSRF parameter added. """
+  params = params or {}
+
+  if not CSRF_TOKEN_KEY in params:
+    params[CSRF_TOKEN_KEY] = CSRF_TOKEN
+
+  return params
+
+
+def gen_basic_auth(username, password):
+  """ Generates a basic auth header. """
+  return 'Basic ' + base64.b64encode("%s:%s" % (username, password))
+
+
+def conduct_call(client, resource, url_for, method, params, body=None, expected_code=200,
+                 headers=None):
+  """ Conducts a call to a Flask endpoint. """
+  params = add_csrf_param(params)
+
+  final_url = url_for(resource, **params)
+
+  headers = headers or {}
+  headers.update({"Content-Type": "application/json"})
+
+  if body is not None:
+    body = json.dumps(body)
+
+  # Required for anonymous calls to not exception.
+  g.identity = Identity(None, 'none')
+
+  rv = client.open(final_url, method=method, data=body, headers=headers)
+  msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code,
+                                             rv.data)
+  assert rv.status_code == expected_code, msg
+  return rv
@@ -12,21 +12,19 @@ import features

 from app import app, metric_queue, get_app_url, license_validator
 from auth.auth_context import get_grant_context
-from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
-                              AdministerRepositoryPermission)
+from auth.permissions import (
+  ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission)
 from auth.registry_jwt_auth import process_registry_jwt_auth, get_auth_headers
-from data.interfaces.v2 import pre_oci_model as model
 from endpoints.decorators import anon_protect, anon_allowed
 from endpoints.v2.errors import V2RegistryException, Unauthorized, Unsupported, NameUnknown
+from endpoints.v2.models_pre_oci import data_model as model
 from util.http import abort
 from util.metrics.metricqueue import time_blueprint
 from util.registry.dockerver import docker_version
 from util.pagination import encrypt_page_token, decrypt_page_token


 logger = logging.getLogger(__name__)


 v2_bp = Blueprint('v2', __name__)
 license_validator.enforce_license_before_request(v2_bp)
 time_blueprint(v2_bp, metric_queue)
@@ -34,9 +32,7 @@ time_blueprint(v2_bp, metric_queue)

 @v2_bp.app_errorhandler(V2RegistryException)
 def handle_registry_v2_exception(error):
-  response = jsonify({
-    'errors': [error.as_dict()]
-  })
+  response = jsonify({'errors': [error.as_dict()]})

   response.status_code = error.http_status_code
   if response.status_code == 401:
@@ -53,6 +49,7 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
   """
   Decorates a handler adding a parsed pagination token and a callback to encode a response token.
   """
+
   def wrapper(func):
     @wraps(func)
     def wrapped(*args, **kwargs):
@@ -86,7 +83,9 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
       kwargs[offset_kwarg_name] = offset
       kwargs[callback_kwarg_name] = callback
       return func(*args, **kwargs)
+
     return wrapped
+
   return wrapper


@@ -94,17 +93,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False):
   def wrapper(func):
     @wraps(func)
     def wrapped(namespace_name, repo_name, *args, **kwargs):
-      logger.debug('Checking permission %s for repo: %s/%s', permission_class,
-                   namespace_name, repo_name)
+      logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace_name,
+                   repo_name)
       repository = namespace_name + '/' + repo_name
       repo = model.get_repository(namespace_name, repo_name)
       if repo is None:
         raise Unauthorized(repository=repository, scopes=scopes)

       permission = permission_class(namespace_name, repo_name)
-      if (permission.can() or
-          (allow_public and
-           repo.is_public)):
+      if (permission.can() or (allow_public and repo.is_public)):
         if repo.kind != 'image':
           msg = 'This repository is for managing %s resources and not container images.' % repo.kind
           raise Unsupported(detail=msg)
@@ -112,16 +109,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False):
         raise Unauthorized(repository=repository, scopes=scopes)

     return wrapped

   return wrapper


-require_repo_read = _require_repo_permission(ReadRepositoryPermission,
-                                             scopes=['pull'],
-                                             allow_public=True)
-require_repo_write = _require_repo_permission(ModifyRepositoryPermission,
-                                              scopes=['pull', 'push'])
-require_repo_admin = _require_repo_permission(AdministerRepositoryPermission,
-                                              scopes=['pull', 'push'])
+require_repo_read = _require_repo_permission(ReadRepositoryPermission, scopes=['pull'],
+                                             allow_public=True)
+require_repo_write = _require_repo_permission(ModifyRepositoryPermission, scopes=['pull', 'push'])
+require_repo_admin = _require_repo_permission(AdministerRepositoryPermission, scopes=[
+  'pull', 'push'])


 def get_input_stream(flask_request):
@@ -138,7 +134,9 @@ def route_show_if(value):
         abort(404)

       return f(*args, **kwargs)
+
     return decorated_function
+
   return decorator


@@ -169,5 +167,4 @@ from endpoints.v2 import (
   catalog,
   manifest,
   tag,
-  v2auth,
-)
+  v2auth,)
@@ -10,22 +10,20 @@ import resumablehashlib
 from app import storage, app, get_app_url, metric_queue
 from auth.registry_jwt_auth import process_registry_jwt_auth
 from data import database
-from data.interfaces.v2 import pre_oci_model as model
 from digest import digest_tools
 from endpoints.common import parse_repository_name
-from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
-from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported,
-                                 NameUnknown, LayerTooLarge)
 from endpoints.decorators import anon_protect
+from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
+from endpoints.v2.errors import (
+  BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge)
+from endpoints.v2.models_pre_oci import data_model as model
 from util.cache import cache_control
 from util.registry.filelike import wrap_with_handler, StreamSlice
 from util.registry.gzipstream import calculate_size_handler
 from util.registry.torrent import PieceHasher


 logger = logging.getLogger(__name__)


 BASE_BLOB_ROUTE = '/<repopath:repository>/blobs/<regex("{0}"):digest>'
 BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN)
 RANGE_HEADER_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$')
@@ -52,8 +50,7 @@ def check_blob_exists(namespace_name, repo_name, digest):
   headers = {
     'Docker-Content-Digest': digest,
     'Content-Length': blob.size,
-    'Content-Type': BLOB_CONTENT_TYPE,
-  }
+    'Content-Type': BLOB_CONTENT_TYPE,}

   # If our storage supports range requests, let the client know.
   if storage.get_supports_resumable_downloads(blob.locations):
@@ -102,10 +99,7 @@ def download_blob(namespace_name, repo_name, digest):
     storage.stream_read(blob.locations, path),
     headers=headers.update({
       'Content-Length': blob.size,
-      'Content-Type': BLOB_CONTENT_TYPE,
-    }),
-  )
+      'Content-Type': BLOB_CONTENT_TYPE,}),)


 @v2_bp.route('/<repopath:repository>/blobs/uploads/', methods=['POST'])
@@ -128,13 +122,13 @@ def start_blob_upload(namespace_name, repo_name):
     return Response(
       status=202,
       headers={
-        'Docker-Upload-UUID': new_upload_uuid,
-        'Range': _render_range(0),
-        'Location': get_app_url() + url_for('v2.upload_chunk',
-                                            repository='%s/%s' % (namespace_name, repo_name),
-                                            upload_uuid=new_upload_uuid)
-      },
-    )
+        'Docker-Upload-UUID':
+          new_upload_uuid,
+        'Range':
+          _render_range(0),
+        'Location':
+          get_app_url() + url_for('v2.upload_chunk', repository='%s/%s' %
+                                  (namespace_name, repo_name), upload_uuid=new_upload_uuid)},)

   # The user plans to send us the entire body right now.
   # Find the upload.
@@ -158,12 +152,11 @@ def start_blob_upload(namespace_name, repo_name):
   return Response(
     status=201,
     headers={
-      'Docker-Content-Digest': digest,
-      'Location': get_app_url() + url_for('v2.download_blob',
-                                          repository='%s/%s' % (namespace_name, repo_name),
-                                          digest=digest),
-    },
-  )
+      'Docker-Content-Digest':
+        digest,
+      'Location':
+        get_app_url() + url_for('v2.download_blob', repository='%s/%s' %
+                                (namespace_name, repo_name), digest=digest),},)


 @v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['GET'])
@@ -180,9 +173,8 @@ def fetch_existing_upload(namespace_name, repo_name, upload_uuid):
     status=204,
     headers={
       'Docker-Upload-UUID': upload_uuid,
-      'Range': _render_range(blob_upload.byte_count+1), # byte ranges are exclusive
-    },
-  )
+      'Range': _render_range(blob_upload.byte_count + 1),  # byte ranges are exclusive
+    },)


 @v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PATCH'])
@@ -211,9 +203,7 @@ def upload_chunk(namespace_name, repo_name, upload_uuid):
     headers={
       'Location': _current_request_url(),
       'Range': _render_range(updated_blob_upload.byte_count, with_bytes_prefix=False),
-      'Docker-Upload-UUID': upload_uuid,
-    },
-  )
+      'Docker-Upload-UUID': upload_uuid,},)


 @v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PUT'])
@@ -242,15 +232,12 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
   _finish_upload(namespace_name, repo_name, updated_blob_upload, digest)

   # Write the response to the client.
-  return Response(
-    status=201,
-    headers={
-      'Docker-Content-Digest': digest,
-      'Location': get_app_url() + url_for('v2.download_blob',
-                                          repository='%s/%s' % (namespace_name, repo_name),
-                                          digest=digest),
-    }
-  )
+  return Response(status=201, headers={
+    'Docker-Content-Digest':
+      digest,
+    'Location':
+      get_app_url() + url_for('v2.download_blob', repository='%s/%s' %
+                              (namespace_name, repo_name), digest=digest),})


 @v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['DELETE'])
@@ -300,9 +287,11 @@ def _abort_range_not_satisfiable(valid_end, upload_uuid):

   TODO(jzelinskie): Unify this with the V2RegistryException class.
   """
-  flask_abort(Response(status=416, headers={'Location': _current_request_url(),
-                                            'Range': '0-{0}'.format(valid_end),
-                                            'Docker-Upload-UUID': upload_uuid}))
+  flask_abort(
+    Response(status=416, headers={
+      'Location': _current_request_url(),
+      'Range': '0-{0}'.format(valid_end),
+      'Docker-Upload-UUID': upload_uuid}))


 def _parse_range_header(range_header_text):
@@ -415,16 +404,15 @@ def _upload_chunk(blob_upload, range_header):
     length,
     input_fp,
     blob_upload.storage_metadata,
-    content_type=BLOB_CONTENT_TYPE,
-  )
+    content_type=BLOB_CONTENT_TYPE,)

   if upload_error is not None:
     logger.error('storage.stream_upload_chunk returned error %s', upload_error)
     return None

   # Update the chunk upload time metric.
-  metric_queue.chunk_upload_time.Observe(time.time() - start_time,
-                                         labelvalues=[length_written, list(location_set)[0]])
+  metric_queue.chunk_upload_time.Observe(time.time() - start_time, labelvalues=[
+    length_written, list(location_set)[0]])

   # If we determined an uncompressed size and this is the first chunk, add it to the blob.
   # Otherwise, we clear the size from the blob as it was uploaded in multiple chunks.
@@ -499,8 +487,7 @@ def _finalize_blob_database(namespace_name, repo_name, blob_upload, digest, alre
     repo_name,
     digest,
     blob_upload,
-    app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],
-  )
+    app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],)

   # If it doesn't already exist, create the BitTorrent pieces for the blob.
   if blob_upload.piece_sha_state is not None and not already_existed:
@@ -521,5 +508,4 @@ def _finish_upload(namespace_name, repo_name, blob_upload, digest):
     repo_name,
     blob_upload,
     digest,
-    _finalize_blob_storage(blob_upload, digest),
-  )
+    _finalize_blob_storage(blob_upload, digest),)
@@ -5,7 +5,8 @@ from flask import jsonify
 from auth.registry_jwt_auth import process_registry_jwt_auth, get_granted_entity
 from endpoints.decorators import anon_protect
 from endpoints.v2 import v2_bp, paginate
-from data.interfaces.v2 import pre_oci_model as model
+from endpoints.v2.models_pre_oci import data_model as model


 @v2_bp.route('/_catalog', methods=['GET'])
 @process_registry_jwt_auth()
@@ -18,12 +19,11 @@ def catalog_search(limit, offset, pagination_callback):
     username = entity.user.username

   include_public = bool(features.PUBLIC_CATALOG)
-  visible_repositories = model.get_visible_repositories(username, limit+1, offset,
+  visible_repositories = model.get_visible_repositories(username, limit + 1, offset,
                                                         include_public=include_public)
   response = jsonify({
     'repositories': ['%s/%s' % (repo.namespace_name, repo.name)
-                     for repo in visible_repositories][0:limit],
-  })
+                     for repo in visible_repositories][0:limit],})

   pagination_callback(len(visible_repositories), response)
   return response
@@ -1,8 +1,9 @@
 import bitmath


 class V2RegistryException(Exception):
-  def __init__(self, error_code_str, message, detail, http_status_code=400,
-               repository=None, scopes=None):
+  def __init__(self, error_code_str, message, detail, http_status_code=400, repository=None,
+               scopes=None):
     super(V2RegistryException, self).__init__(message)
     self.http_status_code = http_status_code
     self.repository = repository
@@ -15,104 +16,81 @@ class V2RegistryException(Exception):
     return {
       'code': self._error_code_str,
       'message': self.message,
-      'detail': self._detail if self._detail is not None else {},
-    }
+      'detail': self._detail if self._detail is not None else {},}


 class BlobUnknown(V2RegistryException):
   def __init__(self, detail=None):
-    super(BlobUnknown, self).__init__('BLOB_UNKNOWN',
-                                      'blob unknown to registry',
-                                      detail,
-                                      404)
+    super(BlobUnknown, self).__init__('BLOB_UNKNOWN', 'blob unknown to registry', detail, 404)


 class BlobUploadInvalid(V2RegistryException):
   def __init__(self, detail=None):
-    super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID',
-                                            'blob upload invalid',
-                                            detail)
+    super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID', 'blob upload invalid', detail)


 class BlobUploadUnknown(V2RegistryException):
   def __init__(self, detail=None):
     super(BlobUploadUnknown, self).__init__('BLOB_UPLOAD_UNKNOWN',
-                                            'blob upload unknown to registry',
-                                            detail,
-                                            404)
+                                            'blob upload unknown to registry', detail, 404)


 class DigestInvalid(V2RegistryException):
   def __init__(self, detail=None):
     super(DigestInvalid, self).__init__('DIGEST_INVALID',
-                                        'provided digest did not match uploaded content',
-                                        detail)
+                                        'provided digest did not match uploaded content', detail)


 class ManifestBlobUnknown(V2RegistryException):
   def __init__(self, detail=None):
     super(ManifestBlobUnknown, self).__init__('MANIFEST_BLOB_UNKNOWN',
-                                              'manifest blob unknown to registry',
-                                              detail)
+                                              'manifest blob unknown to registry', detail)


 class ManifestInvalid(V2RegistryException):
   def __init__(self, detail=None, http_status_code=400):
-    super(ManifestInvalid, self).__init__('MANIFEST_INVALID',
-                                          'manifest invalid',
-                                          detail,
+    super(ManifestInvalid, self).__init__('MANIFEST_INVALID', 'manifest invalid', detail,
                                           http_status_code)


 class ManifestUnknown(V2RegistryException):
   def __init__(self, detail=None):
-    super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN',
-                                          'manifest unknown',
-                                          detail,
-                                          404)
+    super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN', 'manifest unknown', detail, 404)


 class ManifestUnverified(V2RegistryException):
   def __init__(self, detail=None):
     super(ManifestUnverified, self).__init__('MANIFEST_UNVERIFIED',
-                                             'manifest failed signature verification',
-                                             detail)
+                                             'manifest failed signature verification', detail)


 class NameInvalid(V2RegistryException):
   def __init__(self, detail=None, message=None):
-    super(NameInvalid, self).__init__('NAME_INVALID',
-                                      message or 'invalid repository name',
-                                      detail)
+    super(NameInvalid, self).__init__('NAME_INVALID', message or 'invalid repository name', detail)


 class NameUnknown(V2RegistryException):
   def __init__(self, detail=None):
-    super(NameUnknown, self).__init__('NAME_UNKNOWN',
-                                      'repository name not known to registry',
-                                      detail,
-                                      404)
+    super(NameUnknown, self).__init__('NAME_UNKNOWN', 'repository name not known to registry',
+                                      detail, 404)


 class SizeInvalid(V2RegistryException):
   def __init__(self, detail=None):
     super(SizeInvalid, self).__init__('SIZE_INVALID',
-                                      'provided length did not match content length',
-                                      detail)
+                                      'provided length did not match content length', detail)


 class TagAlreadyExists(V2RegistryException):
   def __init__(self, detail=None):
-    super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS',
-                                           'tag was already pushed',
-                                           detail,
+    super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS', 'tag was already pushed', detail,
                                            409)


 class TagInvalid(V2RegistryException):
   def __init__(self, detail=None):
-    super(TagInvalid, self).__init__('TAG_INVALID',
-                                     'manifest tag did not match URI',
-                                     detail)
+    super(TagInvalid, self).__init__('TAG_INVALID', 'manifest tag did not match URI', detail)


 class LayerTooLarge(V2RegistryException):
   def __init__(self, uploaded=None, max_allowed=None):
@@ -123,43 +101,33 @@ class LayerTooLarge(V2RegistryException):
     detail = {
       'reason': '%s is greater than maximum allowed size %s' % (uploaded, max_allowed),
       'max_allowed': max_allowed,
-      'uploaded': uploaded,
-    }
+      'uploaded': uploaded,}

     up_str = bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}")
     max_str = bitmath.Byte(max_allowed).best_prefix().format("{value:.2f} {unit}")
-    message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str, max_str)
+    message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str,
+                                                                                  max_str)


 class Unauthorized(V2RegistryException):
   def __init__(self, detail=None, repository=None, scopes=None):
-    super(Unauthorized, self).__init__('UNAUTHORIZED',
-                                       'access to the requested resource is not authorized',
-                                       detail,
-                                       401,
-                                       repository=repository,
-                                       scopes=scopes)
+    super(Unauthorized,
+          self).__init__('UNAUTHORIZED', 'access to the requested resource is not authorized',
+                         detail, 401, repository=repository, scopes=scopes)


 class Unsupported(V2RegistryException):
   def __init__(self, detail=None, message=None):
-    super(Unsupported, self).__init__('UNSUPPORTED',
-                                      message or 'The operation is unsupported.',
-                                      detail,
-                                      405)
+    super(Unsupported, self).__init__('UNSUPPORTED', message or 'The operation is unsupported.',
                                       detail, 405)


 class InvalidLogin(V2RegistryException):
   def __init__(self, message=None):
-    super(InvalidLogin, self).__init__('UNAUTHORIZED',
-                                       message or 'Specified credentials are invalid',
-                                       {},
-                                       401)
+    super(InvalidLogin, self).__init__('UNAUTHORIZED', message or
+                                       'Specified credentials are invalid', {}, 401)


 class InvalidRequest(V2RegistryException):
   def __init__(self, message=None):
-    super(InvalidRequest, self).__init__('INVALID_REQUEST',
-                                         message or 'Invalid request',
-                                         {},
-                                         400)
+    super(InvalidRequest, self).__init__('INVALID_REQUEST', message or 'Invalid request', {}, 400)
@@ -8,14 +8,15 @@ import features

 from app import docker_v2_signing_key, app, metric_queue
 from auth.registry_jwt_auth import process_registry_jwt_auth
-from data.interfaces.v2 import pre_oci_model as model, Label
 from digest import digest_tools
 from endpoints.common import parse_repository_name
 from endpoints.decorators import anon_protect
-from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
-from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid,
-                                 NameInvalid)
 from endpoints.notificationhelper import spawn_notification
+from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
+from endpoints.v2.errors import (
+  BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid, NameInvalid)
+from endpoints.v2.models_interface import Label
+from endpoints.v2.models_pre_oci import data_model as model
 from image.docker import ManifestException
 from image.docker.schema1 import DockerSchema1Manifest, DockerSchema1ManifestBuilder
 from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES
@@ -24,14 +25,13 @@ from util.names import VALID_TAG_PATTERN
 from util.registry.replication import queue_replication_batch
 from util.validation import is_json


 logger = logging.getLogger(__name__)


 BASE_MANIFEST_ROUTE = '/<repopath:repository>/manifests/<regex("{0}"):manifest_ref>'
 MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
 MANIFEST_TAGNAME_ROUTE = BASE_MANIFEST_ROUTE.format(VALID_TAG_PATTERN)


 @v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET'])
 @parse_repository_name()
 @process_registry_jwt_auth(scopes=['pull'])
@@ -51,14 +51,14 @@ def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
   repo = model.get_repository(namespace_name, repo_name)
   if repo is not None:
     track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01,
                   tag=manifest_ref)
     metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])

   return Response(
     manifest.json,
     status=200,
-    headers={'Content-Type': manifest.media_type, 'Docker-Content-Digest': manifest.digest},
-  )
+    headers={'Content-Type': manifest.media_type,
+             'Docker-Content-Digest': manifest.digest},)


 @v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['GET'])
@@ -77,8 +77,9 @@ def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref):
     track_and_log('pull_repo', repo, manifest_digest=manifest_ref)
     metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])

-  return Response(manifest.json, status=200, headers={'Content-Type': manifest.media_type,
-                                                      'Docker-Content-Digest': manifest.digest})
+  return Response(manifest.json, status=200, headers={
+    'Content-Type': manifest.media_type,
+    'Docker-Content-Digest': manifest.digest})


 def _reject_manifest2_schema2(func):
@@ -88,6 +89,7 @@ def _reject_manifest2_schema2(func):
       raise ManifestInvalid(detail={'message': 'manifest schema version not supported'},
                             http_status_code=415)
     return func(*args, **kwargs)
+
   return wrapped


@@ -130,8 +132,7 @@ def write_manifest_by_digest(namespace_name, repo_name, manifest_ref):


 def _write_manifest(namespace_name, repo_name, manifest):
-  if (manifest.namespace == '' and
-      features.LIBRARY_SUPPORT and
+  if (manifest.namespace == '' and features.LIBRARY_SUPPORT and
       namespace_name == app.config['LIBRARY_NAMESPACE']):
     pass
   elif manifest.namespace != namespace_name:
@@ -173,8 +174,7 @@ def _write_manifest(namespace_name, repo_name, manifest):
       rewritten_image.comment,
       rewritten_image.command,
       rewritten_image.compat_json,
-      rewritten_image.parent_image_id,
-    )
+      rewritten_image.parent_image_id,)
   except ManifestException as me:
     logger.exception("exception when rewriting v1 metadata")
     raise ManifestInvalid(detail={'message': 'failed synthesizing v1 metadata: %s' % me.message})
@@ -211,12 +211,11 @@ def _write_manifest_and_log(namespace_name, repo_name, manifest):
     'OK',
     status=202,
     headers={
-      'Docker-Content-Digest': manifest.digest,
-      'Location': url_for('v2.fetch_manifest_by_digest',
-                          repository='%s/%s' % (namespace_name, repo_name),
-                          manifest_ref=manifest.digest),
-    },
-  )
+      'Docker-Content-Digest':
+        manifest.digest,
+      'Location':
+        url_for('v2.fetch_manifest_by_digest', repository='%s/%s' % (namespace_name, repo_name),
+                manifest_ref=manifest.digest),},)


 @v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
|
||||||
|
@ -270,5 +269,6 @@ def _generate_and_store_manifest(namespace_name, repo_name, tag_name):
|
||||||
manifest.bytes)
|
manifest.bytes)
|
||||||
return manifest
|
return manifest
|
||||||
|
|
||||||
|
|
||||||
def _determine_media_type(value):
|
def _determine_media_type(value):
|
||||||
media_type_name = 'application/json' if is_json(value) else 'text/plain'
|
media_type_name = 'application/json' if is_json(value) else 'text/plain'
|
||||||
|
|
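The hunks above only reshape `_reject_manifest2_schema2` and its callers; for readers outside the codebase, here is a rough, self-contained sketch of what such a decorator does. The content-type check and the `ManifestInvalid` stub are reconstructed assumptions for illustration, not verbatim Quay code:

# Sketch only: rejects Docker schema2 manifest requests with HTTP 415, since
# the pre-OCI registry code path only understands schema1 manifests.
from functools import wraps

from flask import request

# Assumed value; the real list is imported from image.docker.schema2 above.
DOCKER_SCHEMA2_CONTENT_TYPES = [
  'application/vnd.docker.distribution.manifest.v2+json']


class ManifestInvalid(Exception):  # stand-in for endpoints.v2.errors.ManifestInvalid
  def __init__(self, detail=None, http_status_code=400):
    super(ManifestInvalid, self).__init__(detail)
    self.detail = detail
    self.http_status_code = http_status_code


def _reject_manifest2_schema2(func):
  @wraps(func)
  def wrapped(*args, **kwargs):
    # Refuse schema2 content types before the wrapped view runs.
    if request.content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
      raise ManifestInvalid(detail={'message': 'manifest schema version not supported'},
                            http_status_code=415)
    return func(*args, **kwargs)

  return wrapped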
258
endpoints/v2/models_interface.py
Normal file

@@ -0,0 +1,258 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from namedlist import namedlist
+from six import add_metaclass
+
+
+class Repository(
+    namedtuple('Repository', [
+      'id', 'name', 'namespace_name', 'description', 'is_public', 'kind', 'trust_enabled'])):
+  """
+  Repository represents a namespaced collection of tags.
+  :type id: int
+  :type name: string
+  :type namespace_name: string
+  :type description: string
+  :type is_public: bool
+  :type kind: string
+  :type trust_enabled: bool
+  """
+
+
+class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])):
+  """
+  ManifestJSON represents a Manifest of any format.
+  """
+
+
+class Tag(namedtuple('Tag', ['name', 'repository'])):
+  """
+  Tag represents a user-facing alias for referencing a set of Manifests.
+  """
+
+
+class BlobUpload(
+    namedlist('BlobUpload', [
+      'uuid', 'byte_count', 'uncompressed_byte_count', 'chunk_count', 'sha_state', 'location_name',
+      'storage_metadata', 'piece_sha_state', 'piece_hashes', 'repo_namespace_name', 'repo_name'])):
+  """
+  BlobUpload represents the current state of a Blob being uploaded.
+  """
+
+
+class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])):
+  """
+  Blob represents an opaque binary blob saved to the storage system.
+  """
+
+
+class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
+  """
+  RepositoryReference represents a reference to a Repository, without its full metadata.
+  """
+
+
+class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])):
+  """
+  Label represents a key-value pair that describes a particular Manifest.
+  """
+
+
+@add_metaclass(ABCMeta)
+class DockerRegistryV2DataInterface(object):
+  """
+  Interface that represents all data store interactions required by a Docker Registry v2.
+  """
+
+  @abstractmethod
+  def create_repository(self, namespace_name, repo_name, creating_user=None):
+    """
+    Creates a new repository under the specified namespace with the given name. The user supplied
+    is the user creating the repository, if any.
+    """
+    pass
+
+  @abstractmethod
+  def get_repository(self, namespace_name, repo_name):
+    """
+    Returns a repository tuple for the repository with the given name under the given namespace.
+    Returns None if no such repository was found.
+    """
+    pass
+
+  @abstractmethod
+  def has_active_tag(self, namespace_name, repo_name, tag_name):
+    """
+    Returns whether there is an active tag with the given name under the matching repository.
+    """
+    pass
+
+  @abstractmethod
+  def get_manifest_by_tag(self, namespace_name, repo_name, tag_name):
+    """
+    Returns the current manifest for the tag with the given name under the matching repository,
+    if any, or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def get_manifest_by_digest(self, namespace_name, repo_name, digest):
+    """
+    Returns the manifest matching the given digest under the matching repository, if any, or None
+    if none.
+    """
+    pass
+
+  @abstractmethod
+  def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
+    """
+    Deletes the manifest with the associated digest (if any) and returns all removed tags that
+    pointed to that manifest. If the manifest was not found, returns an empty list.
+    """
+    pass
+
+  @abstractmethod
+  def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name):
+    """
+    Returns the Docker V1 metadata associated with the tag with the given name under the matching
+    repository, if any. If none, returns None.
+    """
+    pass
+
+  @abstractmethod
+  def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids):
+    """
+    Returns a map of Docker V1 metadata for each given image ID, matched under the repository
+    with the given namespace and name. Returns an empty map if the matching repository was not
+    found.
+    """
+    pass
+
+  @abstractmethod
+  def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
+    """
+    Returns an ordered list containing the Docker V1 metadata for each parent of the image with
+    the given docker ID under the matching repository. Returns an empty list if the image was not
+    found.
+    """
+    pass
+
+  @abstractmethod
+  def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest,
+                                     manifest_bytes):
+    """
+    Creates a new manifest with the given digest and byte data, and assigns the tag with the
+    given name under the matching repository to it.
+    """
+    pass
+
+  @abstractmethod
+  def synthesize_v1_image(self, repository, storage, image_id, created, comment, command,
+                          compat_json, parent_image_id):
+    """
+    Synthesizes a V1 image under the specified repository, pointing to the given storage, and
+    returns the V1 metadata for the synthesized image.
+    """
+    pass
+
+  @abstractmethod
+  def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
+                    manifest_digest, manifest_bytes):
+    """
+    Saves a manifest pointing to the given leaf image, with the given manifest, under the
+    matching repository as a tag with the given name.
+
+    Returns a boolean indicating whether the tag was newly created.
+    """
+    pass
+
+  @abstractmethod
+  def repository_tags(self, namespace_name, repo_name, limit, offset):
+    """
+    Returns the active tags under the repository with the given name and namespace.
+    """
+    pass
+
+  @abstractmethod
+  def get_visible_repositories(self, username, limit, offset):
+    """
+    Returns the repositories visible to the user with the given username, if any.
+    """
+    pass
+
+  @abstractmethod
+  def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name,
+                         storage_metadata):
+    """
+    Creates a blob upload under the matching repository with the given UUID and metadata.
+    Returns whether the matching repository exists.
+    """
+    pass
+
+  @abstractmethod
+  def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid):
+    """
+    Searches for a blob upload with the given UUID under the given repository and returns it or
+    None if none.
+    """
+    pass
+
+  @abstractmethod
+  def update_blob_upload(self, blob_upload):
+    """
+    Saves any changes to the blob upload object given to the backing data store.
+    Fields that can change:
+    - uncompressed_byte_count
+    - piece_hashes
+    - piece_sha_state
+    - storage_metadata
+    - byte_count
+    - chunk_count
+    - sha_state
+    """
+    pass
+
+  @abstractmethod
+  def delete_blob_upload(self, namespace_name, repo_name, uuid):
+    """
+    Deletes the blob upload with the given uuid under the matching repository. If none, does
+    nothing.
+    """
+    pass
+
+  @abstractmethod
+  def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
+                               expiration_sec):
+    """
+    Creates a blob and links a temporary tag with the specified expiration to it under the
+    matching repository.
+    """
+    pass
+
+  @abstractmethod
+  def get_blob_by_digest(self, namespace_name, repo_name, digest):
+    """
+    Returns the blob with the given digest under the matching repository or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def save_bittorrent_pieces(self, blob, piece_size, piece_bytes):
+    """
+    Saves the BitTorrent piece hashes for the given blob.
+    """
+    pass
+
+  @abstractmethod
+  def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels):
+    """
+    Creates new labels for the provided manifest.
+    """
+    pass
+
+  @abstractmethod
+  def get_blob_path(self, blob):
+    """
+    Once everything is moved over, this could be in util.registry and not even touch the database.
+    """
+    pass
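A concrete data model only needs to subclass the interface above and fill in the abstract methods. As a minimal sketch, assuming the new module is importable, an in-memory toy implementation might begin like this (`InMemoryModel` and its storage dict are hypothetical, not part of this commit):

from endpoints.v2.models_interface import DockerRegistryV2DataInterface, Repository


class InMemoryModel(DockerRegistryV2DataInterface):
  def __init__(self):
    self._repos = {}  # (namespace_name, repo_name) -> Repository

  def create_repository(self, namespace_name, repo_name, creating_user=None):
    repo = Repository(id=len(self._repos) + 1, name=repo_name, namespace_name=namespace_name,
                      description='', is_public=False, kind='image', trust_enabled=False)
    self._repos[(namespace_name, repo_name)] = repo
    return repo

  def get_repository(self, namespace_name, repo_name):
    return self._repos.get((namespace_name, repo_name))

  # Remaining abstract methods omitted here; ABCMeta refuses to instantiate
  # the class until every @abstractmethod has been overridden.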
@@ -1,272 +1,26 @@
-from abc import ABCMeta, abstractmethod
-from collections import namedtuple
-
-from namedlist import namedlist
 from peewee import IntegrityError
-from six import add_metaclass

 from data import model, database
 from data.model import DataModelException
+from endpoints.v2.models_interface import (
+  Blob,
+  BlobUpload,
+  DockerRegistryV2DataInterface,
+  ManifestJSON,
+  Repository,
+  RepositoryReference,
+  Tag,)
 from image.docker.v1 import DockerV1Metadata

 _MEDIA_TYPE = "application/vnd.docker.distribution.manifest.v1+prettyjws"


-class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
-                                           'is_public', 'kind', 'trust_enabled'])):
-  """
-  Repository represents a namespaced collection of tags.
-  :type id: int
-  :type name: string
-  :type namespace_name: string
-  :type description: string
-  :type is_public: bool
-  :type kind: string
-  :type trust_enabled: bool
-  """
-
-
-class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])):
-  """
-  ManifestJSON represents a Manifest of any format.
-  """
-
-
-class Tag(namedtuple('Tag', ['name', 'repository'])):
-  """
-  Tag represents a user-facing alias for referencing a set of Manifests.
-  """
-
-
-class BlobUpload(namedlist('BlobUpload', ['uuid', 'byte_count', 'uncompressed_byte_count',
-                                          'chunk_count', 'sha_state', 'location_name',
-                                          'storage_metadata', 'piece_sha_state', 'piece_hashes',
-                                          'repo_namespace_name', 'repo_name'])):
-  """
-  BlobUpload represents the current state of a Blob being uploaded.
-  """
-
-
-class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])):
-  """
-  Blob represents an opaque binary blob saved to the storage system.
-  """
-
-
-class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
-  """
-  RepositoryReference represents a reference to a Repository, without its full metadata.
-  """
-
-
-class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])):
-  """
-  Label represents a key-value pair that describes a particular Manifest.
-  """
-
-
-@add_metaclass(ABCMeta)
-class DockerRegistryV2DataInterface(object):
-  """
-  Interface that represents all data store interactions required by a Docker Registry v2.
-  """
-
-  @abstractmethod
-  def create_repository(self, namespace_name, repo_name, creating_user=None):
-    """
-    Creates a new repository under the specified namespace with the given name. The user supplied
-    is the user creating the repository, if any.
-    """
-    pass
-
-  @abstractmethod
-  def get_repository(self, namespace_name, repo_name):
-    """
-    Returns a repository tuple for the repository with the given name under the given namespace.
-    Returns None if no such repository was found.
-    """
-    pass
-
-  @abstractmethod
-  def has_active_tag(self, namespace_name, repo_name, tag_name):
-    """
-    Returns whether there is an active tag with the given name under the matching repository.
-    """
-    pass
-
-  @abstractmethod
-  def get_manifest_by_tag(self, namespace_name, repo_name, tag_name):
-    """
-    Returns the current manifest for the tag with the given name under the matching repository,
-    if any, or None if none.
-    """
-    pass
-
-  @abstractmethod
-  def get_manifest_by_digest(self, namespace_name, repo_name, digest):
-    """
-    Returns the manifest matching the given digest under the matching repository, if any, or None
-    if none.
-    """
-    pass
-
-  @abstractmethod
-  def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
-    """
-    Deletes the manifest with the associated digest (if any) and returns all removed tags that
-    pointed to that manifest. If the manifest was not found, returns an empty list.
-    """
-    pass
-
-  @abstractmethod
-  def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name):
-    """
-    Returns the Docker V1 metadata associated with the tag with the given name under the matching
-    repository, if any. If none, returns None.
-    """
-    pass
-
-  @abstractmethod
-  def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids):
-    """
-    Returns a map of Docker V1 metadata for each given image ID, matched under the repository
-    with the given namespace and name. Returns an empty map if the matching repository was not
-    found.
-    """
-    pass
-
-  @abstractmethod
-  def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
-    """
-    Returns an ordered list containing the Docker V1 metadata for each parent of the image with
-    the given docker ID under the matching repository. Returns an empty list if the image was not
-    found.
-    """
-    pass
-
-  @abstractmethod
-  def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest,
-                                     manifest_bytes):
-    """
-    Creates a new manifest with the given digest and byte data, and assigns the tag with the
-    given name under the matching repository to it.
-    """
-    pass
-
-  @abstractmethod
-  def synthesize_v1_image(self, repository, storage, image_id, created, comment, command,
-                          compat_json, parent_image_id):
-    """
-    Synthesizes a V1 image under the specified repository, pointing to the given storage, and
-    returns the V1 metadata for the synthesized image.
-    """
-    pass
-
-  @abstractmethod
-  def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
-                    manifest_digest, manifest_bytes):
-    """
-    Saves a manifest pointing to the given leaf image, with the given manifest, under the
-    matching repository as a tag with the given name.
-
-    Returns a boolean indicating whether the tag was newly created.
-    """
-    pass
-
-  @abstractmethod
-  def repository_tags(self, namespace_name, repo_name, limit, offset):
-    """
-    Returns the active tags under the repository with the given name and namespace.
-    """
-    pass
-
-  @abstractmethod
-  def get_visible_repositories(self, username, limit, offset):
-    """
-    Returns the repositories visible to the user with the given username, if any.
-    """
-    pass
-
-  @abstractmethod
-  def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
-    """
-    Creates a blob upload under the matching repository with the given UUID and metadata.
-    Returns whether the matching repository exists.
-    """
-    pass
-
-  @abstractmethod
-  def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid):
-    """
-    Searches for a blob upload with the given UUID under the given repository and returns it or
-    None if none.
-    """
-    pass
-
-  @abstractmethod
-  def update_blob_upload(self, blob_upload):
-    """
-    Saves any changes to the blob upload object given to the backing data store.
-    Fields that can change:
-    - uncompressed_byte_count
-    - piece_hashes
-    - piece_sha_state
-    - storage_metadata
-    - byte_count
-    - chunk_count
-    - sha_state
-    """
-    pass
-
-  @abstractmethod
-  def delete_blob_upload(self, namespace_name, repo_name, uuid):
-    """
-    Deletes the blob upload with the given uuid under the matching repository. If none, does
-    nothing.
-    """
-    pass
-
-  @abstractmethod
-  def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
-                               expiration_sec):
-    """
-    Creates a blob and links a temporary tag with the specified expiration to it under the
-    matching repository.
-    """
-    pass
-
-  @abstractmethod
-  def get_blob_by_digest(self, namespace_name, repo_name, digest):
-    """
-    Returns the blob with the given digest under the matching repository or None if none.
-    """
-    pass
-
-  @abstractmethod
-  def save_bittorrent_pieces(self, blob, piece_size, piece_bytes):
-    """
-    Saves the BitTorrent piece hashes for the given blob.
-    """
-    pass
-
-  @abstractmethod
-  def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels):
-    """
-    Creates new labels for the provided manifest.
-    """
-    pass
-
-  @abstractmethod
-  def get_blob_path(self, blob):
-    """
-    Once everything is moved over, this could be in util.registry and not even touch the database.
-    """
-    pass
-
-
 class PreOCIModel(DockerRegistryV2DataInterface):
   """
   PreOCIModel implements the data model for the v2 Docker Registry protocol using a database
   schema before it was changed to support the OCI specification.
   """

   def create_repository(self, namespace_name, repo_name, creating_user=None):
     return model.repository.create_repository(namespace_name, repo_name, creating_user)

@@ -299,14 +53,10 @@ class PreOCIModel(DockerRegistryV2DataInterface):

   def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
     def _tag_view(tag):
-      return Tag(
-        name=tag.name,
-        repository=RepositoryReference(
-          id=tag.repository_id,
-          name=repo_name,
-          namespace_name=namespace_name,
-        )
-      )
+      return Tag(name=tag.name, repository=RepositoryReference(
+        id=tag.repository_id,
+        name=repo_name,
+        namespace_name=namespace_name,))

     tags = model.tag.delete_manifest_by_digest(namespace_name, repo_name, digest)
     return [_tag_view(tag) for tag in tags]
@@ -324,8 +74,9 @@ class PreOCIModel(DockerRegistryV2DataInterface):
       return {}

     images_query = model.image.lookup_repository_images(repo, docker_image_ids)
-    return {image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image)
-            for image in images_query}
+    return {
+      image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image)
+      for image in images_query}

   def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
     repo_image = model.image.get_repo_image(namespace_name, repo_name, docker_image_id)
@@ -367,21 +118,16 @@ class PreOCIModel(DockerRegistryV2DataInterface):

   def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
                     manifest_digest, manifest_bytes):
-    (_, newly_created) = model.tag.store_tag_manifest(namespace_name, repo_name, tag_name,
-                                                      leaf_layer_docker_id, manifest_digest,
-                                                      manifest_bytes)
+    (_, newly_created) = model.tag.store_tag_manifest(
+      namespace_name, repo_name, tag_name, leaf_layer_docker_id, manifest_digest, manifest_bytes)
     return newly_created

   def repository_tags(self, namespace_name, repo_name, limit, offset):
     def _tag_view(tag):
-      return Tag(
-        name=tag.name,
-        repository=RepositoryReference(
-          id=tag.repository_id,
-          name=repo_name,
-          namespace_name=namespace_name,
-        )
-      )
+      return Tag(name=tag.name, repository=RepositoryReference(
+        id=tag.repository_id,
+        name=repo_name,
+        namespace_name=namespace_name,))

     tags_query = model.tag.list_repository_tags(namespace_name, repo_name)
     tags_query = tags_query.limit(limit).offset(offset)
@@ -396,7 +142,8 @@ class PreOCIModel(DockerRegistryV2DataInterface):
     query = query.limit(limit).offset(offset)
     return [_repository_for_repo(repo) for repo in query]

-  def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
+  def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name,
+                         storage_metadata):
     try:
       model.blob.initiate_upload(namespace_name, repo_name, upload_uuid, location_name,
                                  storage_metadata)
@@ -421,8 +168,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
       piece_sha_state=found.piece_sha_state,
       piece_hashes=found.piece_hashes,
       location_name=found.location.name,
-      storage_metadata=found.storage_metadata,
-    )
+      storage_metadata=found.storage_metadata,)

   def update_blob_upload(self, blob_upload):
     # Lookup the blob upload object.
@@ -451,17 +197,14 @@ class PreOCIModel(DockerRegistryV2DataInterface):
   def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
                                expiration_sec):
     location_obj = model.storage.get_image_location_for_name(blob_upload.location_name)
-    blob_record = model.blob.store_blob_record_and_temp_link(namespace_name, repo_name,
-                                                             blob_digest, location_obj.id,
-                                                             blob_upload.byte_count,
-                                                             expiration_sec,
-                                                             blob_upload.uncompressed_byte_count)
+    blob_record = model.blob.store_blob_record_and_temp_link(
+      namespace_name, repo_name, blob_digest, location_obj.id, blob_upload.byte_count,
+      expiration_sec, blob_upload.uncompressed_byte_count)
     return Blob(
       uuid=blob_record.uuid,
       digest=blob_digest,
       size=blob_upload.byte_count,
-      locations=[blob_upload.location_name],
-    )
+      locations=[blob_upload.location_name],)

   def lookup_blobs_by_digest(self, namespace_name, repo_name, digests):
     def _blob_view(blob_record):
@@ -469,7 +212,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
         uuid=blob_record.uuid,
         digest=blob_record.content_checksum,
         size=blob_record.image_size,
         locations=None,  # Note: Locations is None in this case.
       )

     repo = model.repository.get_repository(namespace_name, repo_name)
@@ -485,8 +228,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
         uuid=blob_record.uuid,
         digest=digest,
         size=blob_record.image_size,
-        locations=blob_record.locations,
-      )
+        locations=blob_record.locations,)
     except model.BlobDoesNotExist:
       return None

@@ -527,8 +269,7 @@ def _docker_v1_metadata(namespace_name, repo_name, repo_image):
     comment=repo_image.comment,
     command=repo_image.command,
     # TODO: make sure this isn't needed anywhere, as it is expensive to lookup
-    parent_image_id=None,
-  )
+    parent_image_id=None,)


 def _repository_for_repo(repo):
@@ -540,8 +281,7 @@ def _repository_for_repo(repo):
     description=repo.description,
     is_public=model.repository.is_repository_public(repo),
     kind=model.repository.get_repo_kind_name(repo),
-    trust_enabled=repo.trust_enabled,
-  )
+    trust_enabled=repo.trust_enabled,)


-pre_oci_model = PreOCIModel()
+data_model = PreOCIModel()
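The last hunk renames the module-level singleton from `pre_oci_model` to `data_model`, so call sites now bind the concrete model through the new module path. As a sketch (the namespace and repository names are invented fixtures, and this only runs inside a configured Quay instance):

from endpoints.v2.models_pre_oci import data_model as model

# Returns a Repository namedtuple, or None if the repository does not exist.
repo = model.get_repository('devtable', 'simple')
if repo is not None:
  print(repo.namespace_name, repo.name, repo.is_public)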
@@ -2,9 +2,10 @@ from flask import jsonify

 from auth.registry_jwt_auth import process_registry_jwt_auth
 from endpoints.common import parse_repository_name
-from endpoints.v2 import v2_bp, require_repo_read, paginate
 from endpoints.decorators import anon_protect
-from data.interfaces.v2 import pre_oci_model as model
+from endpoints.v2 import v2_bp, require_repo_read, paginate
+from endpoints.v2.models_pre_oci import data_model as model


 @v2_bp.route('/<repopath:repository>/tags/list', methods=['GET'])
 @parse_repository_name()
@@ -16,8 +17,7 @@ def list_all_tags(namespace_name, repo_name, limit, offset, pagination_callback):
   tags = model.repository_tags(namespace_name, repo_name, limit, offset)
   response = jsonify({
     'name': '{0}/{1}'.format(namespace_name, repo_name),
-    'tags': [tag.name for tag in tags],
-  })
+    'tags': [tag.name for tag in tags],})

   pagination_callback(len(tags), response)
   return response
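For reference, the payload that `list_all_tags` assembles matches the Docker Registry v2 tags-list shape. A runnable sketch with invented fixture values:

import json

# 'devtable/simple' and the tag names are illustrative only.
payload = {
  'name': '{0}/{1}'.format('devtable', 'simple'),
  'tags': ['latest', 'v1.0'],}
print(json.dumps(payload))  # {"name": "devtable/simple", "tags": ["latest", "v1.0"]}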
@@ -13,18 +13,18 @@ from auth.permissions import (ModifyRepositoryPermission, ReadRepositoryPermissi
 from endpoints.decorators import anon_protect
 from endpoints.v2 import v2_bp
 from endpoints.v2.errors import InvalidLogin, NameInvalid, InvalidRequest, Unsupported, Unauthorized
-from data.interfaces.v2 import pre_oci_model as model
+from endpoints.v2.models_pre_oci import data_model as model
 from util.cache import no_cache
 from util.names import parse_namespace_repository, REPOSITORY_NAME_REGEX
-from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, QUAY_TUF_ROOT,
-                                        SIGNER_TUF_ROOT, DISABLED_TUF_ROOT)
+from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject,
+                                        QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT)

 logger = logging.getLogger(__name__)

 TOKEN_VALIDITY_LIFETIME_S = 60 * 60  # 1 hour
 SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'


 @lru_cache(maxsize=1)
 def get_scope_regex():
   hostname = re.escape(app.config['SERVER_HOSTNAME'])
@@ -64,8 +64,7 @@ def generate_registry_jwt(auth_result):

   access = []
   user_event_data = {
-    'action': 'login',
-  }
+    'action': 'login',}

   tuf_root = DISABLED_TUF_ROOT
   if len(scope_param) > 0:
@@ -101,8 +100,8 @@ def generate_registry_jwt(auth_result):
   repo_is_public = repo is not None and repo.is_public
   invalid_repo_message = ''
   if repo is not None and repo.kind != 'image':
-    invalid_repo_message = (('This repository is for managing %s resources ' +
-                             'and not container images.') % repo.kind)
+    invalid_repo_message = ((
+      'This repository is for managing %s resources ' + 'and not container images.') % repo.kind)

   if 'push' in actions:
     # If there is no valid user or token, then the repository cannot be
@@ -150,8 +149,7 @@ def generate_registry_jwt(auth_result):
     access.append({
       'type': 'repository',
       'name': registry_and_repo,
-      'actions': final_actions,
-    })
+      'actions': final_actions,})

     # Set the user event data for the auth.
     if 'push' in final_actions:
@@ -164,8 +162,7 @@ def generate_registry_jwt(auth_result):
     user_event_data = {
       'action': user_action,
       'repository': reponame,
-      'namespace': namespace,
-    }
+      'namespace': namespace,}
     tuf_root = get_tuf_root(repo, namespace, reponame)

   elif user is None and token is None:
@@ -179,7 +176,8 @@ def generate_registry_jwt(auth_result):
   event.publish_event_data('docker-cli', user_event_data)

   # Build the signed JWT.
-  context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken, tuf_root=tuf_root)
+  context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken,
+                                               tuf_root=tuf_root)
   token = generate_bearer_token(audience_param, subject, context, access,
                                 TOKEN_VALIDITY_LIFETIME_S, instance_keys)
   return jsonify({'token': token})
@@ -188,7 +186,7 @@ def generate_registry_jwt(auth_result):
 def get_tuf_root(repo, namespace, reponame):
   if not features.SIGNING or repo is None or not repo.trust_enabled:
     return DISABLED_TUF_ROOT

   # Users with write access to a repo will see signer-rooted TUF metadata
   if ModifyRepositoryPermission(namespace, reponame).can():
     return SIGNER_TUF_ROOT
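Since SCOPE_REGEX_TEMPLATE is shown in full above, a runnable sketch of how it decomposes a scope parameter may help; 'quay.example.com' and the repository name are placeholders:

import re

SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'

# Mirrors get_scope_regex, which formats the template with the escaped hostname.
scope_regex = re.compile(SCOPE_REGEX_TEMPLATE.format(re.escape('quay.example.com')))

match = scope_regex.match('repository:devtable/simple:push,pull')
registry_and_repo, repo, actions = match.groups()
print(repo, actions.split(','))  # devtable/simple ['push', 'pull']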
@@ -10,9 +10,9 @@ from auth.auth_context import get_authenticated_user
 from auth.decorators import process_auth
 from auth.permissions import ReadRepositoryPermission
 from data import database
-from data.interfaces.verbs import pre_oci_model as model
 from endpoints.common import route_show_if, parse_repository_name
 from endpoints.decorators import anon_protect
+from endpoints.verbs.models_pre_oci import pre_oci_model as model
 from endpoints.v2.blob import BLOB_DIGEST_ROUTE
 from image.appc import AppCImageFormatter
 from image.docker.squashed import SquashedDockerImageFormatter
@@ -22,16 +22,14 @@ from util.http import exact_abort
 from util.registry.filelike import wrap_with_handler
 from util.registry.queuefile import QueueFile
 from util.registry.queueprocess import QueueProcess
-from util.registry.torrent import (make_torrent, per_user_torrent_filename, public_torrent_filename,
-                                   PieceHasher)
+from util.registry.torrent import (
+    make_torrent, per_user_torrent_filename, public_torrent_filename, PieceHasher)


 logger = logging.getLogger(__name__)

 verbs = Blueprint('verbs', __name__)
 license_validator.enforce_license_before_request(verbs)

 LAYER_MIMETYPE = 'binary/octet-stream'


@@ -60,7 +58,8 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
     logger.debug('Returning image layer %s: %s', current_image.image_id, current_image_path)
     yield current_image_stream

-  stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image, get_next_layer)
+  stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image,
+                                  get_next_layer)

   for handler_fn in handlers:
     stream = wrap_with_handler(stream, handler_fn)
@@ -89,6 +88,7 @@ def _write_derived_image_to_storage(verb, derived_image, queue_file):
   """ Read from the generated stream and write it back to the storage engine. This method runs in a
   separate process.
   """

   def handle_exception(ex):
     logger.debug('Exception when building %s derived image %s: %s', verb, derived_image.ref, ex)

@@ -139,8 +139,9 @@ def _torrent_for_blob(blob, is_public):
   torrent_file = make_torrent(name, webseed, blob.size, torrent_info.piece_length,
                               torrent_info.pieces)

-  headers = {'Content-Type': 'application/x-bittorrent',
-             'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)}
+  headers = {
+    'Content-Type': 'application/x-bittorrent',
+    'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)}

   return make_response(torrent_file, 200, headers)

@@ -158,8 +159,7 @@ def _torrent_repo_verb(repo_image, tag, verb, **kwargs):
     abort(406)

   # Return the torrent.
-  repo = model.get_repository(repo_image.repository.namespace_name,
-                              repo_image.repository.name)
+  repo = model.get_repository(repo_image.repository.namespace_name, repo_image.repository.name)
   repo_is_public = repo is not None and repo.is_public
   torrent = _torrent_for_blob(derived_image.blob, repo_is_public)

@@ -229,15 +229,14 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
   metric_queue.repository_pull.Inc(labelvalues=[namespace, repository, verb, True])

   # Lookup/create the derived image for the verb and repo image.
-  derived_image = model.lookup_or_create_derived_image(repo_image, verb,
-                                                       storage.preferred_locations[0],
-                                                       varying_metadata={'tag': tag})
+  derived_image = model.lookup_or_create_derived_image(
+      repo_image, verb, storage.preferred_locations[0], varying_metadata={'tag': tag})
   if not derived_image.blob.uploading:
     logger.debug('Derived %s image %s exists in storage', verb, derived_image.ref)
     derived_layer_path = model.get_blob_path(derived_image.blob)
     is_head_request = request.method == 'HEAD'
-    download_url = storage.get_direct_download_url(derived_image.blob.locations, derived_layer_path,
-                                                   head=is_head_request)
+    download_url = storage.get_direct_download_url(derived_image.blob.locations,
+                                                   derived_layer_path, head=is_head_request)
     if download_url:
       logger.debug('Redirecting to download URL for derived %s image %s', verb, derived_image.ref)
       return redirect(download_url)
@@ -246,8 +245,9 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
     database.close_db_filter(None)

     logger.debug('Sending cached derived %s image %s', verb, derived_image.ref)
-    return send_file(storage.stream_read_file(derived_image.blob.locations, derived_layer_path),
-                     mimetype=LAYER_MIMETYPE)
+    return send_file(
+        storage.stream_read_file(derived_image.blob.locations, derived_layer_path),
+        mimetype=LAYER_MIMETYPE)

   logger.debug('Building and returning derived %s image %s', verb, derived_image.ref)

@@ -270,9 +270,12 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
   # and send the results to the client and storage.
   handlers = [hasher.update]
   args = (formatter, repo_image, tag, derived_image_id, handlers)
-  queue_process = QueueProcess(_open_stream,
-                               8 * 1024, 10 * 1024 * 1024,  # 8K/10M chunk/max
-                               args, finished=_store_metadata_and_cleanup)
+  queue_process = QueueProcess(
+      _open_stream,
+      8 * 1024,
+      10 * 1024 * 1024,  # 8K/10M chunk/max
+      args,
+      finished=_store_metadata_and_cleanup)

   client_queue_file = QueueFile(queue_process.create_queue(), 'client')
   storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
@@ -336,11 +339,13 @@ def get_aci_signature(server, namespace, repository, tag, os, arch):

 @route_show_if(features.ACI_CONVERSION)
 @anon_protect
-@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET', 'HEAD'])
+@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=[
+    'GET', 'HEAD'])
 @process_auth
 def get_aci_image(server, namespace, repository, tag, os, arch):
-  return _repo_verb(namespace, repository, tag, 'aci', AppCImageFormatter(),
-                    sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch)
+  return _repo_verb(namespace, repository, tag, 'aci',
+                    AppCImageFormatter(), sign=True, checker=os_arch_checker(os, arch), os=os,
+                    arch=arch)


 @anon_protect
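The QueueProcess/QueueFile pattern above fans one generated byte stream out to two consumers (the HTTP client and the storage writer). A simplified stand-in using only the standard library, not Quay's actual classes:

from multiprocessing import Queue

client_queue, storage_queue = Queue(), Queue()

def produce(chunks):
  for chunk in chunks:          # e.g. 8K chunks yielded by _open_stream
    client_queue.put(chunk)     # one copy streams to the HTTP client
    storage_queue.put(chunk)    # one copy is written back to storage
  client_queue.put(None)        # sentinel marks end of stream
  storage_queue.put(None)

produce([b'layer-bytes-1', b'layer-bytes-2'])
while True:
  chunk = client_queue.get()
  if chunk is None:
    break
  print(len(chunk))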
154
endpoints/verbs/models_interface.py
Normal file

@@ -0,0 +1,154 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class Repository(
+    namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', 'is_public',
+                              'kind'])):
+  """
+  Repository represents a namespaced collection of tags.
+  :type id: int
+  :type name: string
+  :type namespace_name: string
+  :type description: string
+  :type is_public: bool
+  :type kind: string
+  """
+
+
+class DerivedImage(namedtuple('DerivedImage', ['ref', 'blob', 'internal_source_image_db_id'])):
+  """
+  DerivedImage represents a user-facing alias for an image which was derived from another image.
+  """
+
+
+class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
+  """
+  RepositoryReference represents a reference to a Repository, without its full metadata.
+  """
+
+
+class ImageWithBlob(
+    namedtuple('Image', [
+      'image_id', 'blob', 'compat_metadata', 'repository', 'internal_db_id', 'v1_metadata'])):
+  """
+  ImageWithBlob represents a user-facing alias for referencing an image, along with its blob.
+  """
+
+
+class Blob(namedtuple('Blob', ['uuid', 'size', 'uncompressed_size', 'uploading', 'locations'])):
+  """
+  Blob represents an opaque binary blob saved to the storage system.
+  """
+
+
+class TorrentInfo(namedtuple('TorrentInfo', ['piece_length', 'pieces'])):
+  """
+  TorrentInfo represents the torrent piece information associated with a blob.
+  """
+
+
+@add_metaclass(ABCMeta)
+class VerbsDataInterface(object):
+  """
+  Interface that represents all data store interactions required by the registry's custom HTTP
+  verbs.
+  """
+
+  @abstractmethod
+  def get_repository(self, namespace_name, repo_name):
+    """
+    Returns a repository tuple for the repository with the given name under the given namespace.
+    Returns None if no such repository was found.
+    """
+    pass
+
+  @abstractmethod
+  def get_manifest_layers_with_blobs(self, repo_image):
+    """
+    Returns the full set of manifest layers and their associated blobs starting at the given
+    repository image and working upwards to the root image.
+    """
+    pass
+
+  @abstractmethod
+  def get_blob_path(self, blob):
+    """
+    Returns the storage path for the given blob.
+    """
+    pass
+
+  @abstractmethod
+  def get_derived_image_signature(self, derived_image, signer_name):
+    """
+    Returns the signature associated with the derived image and a specific signer or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def set_derived_image_signature(self, derived_image, signer_name, signature):
+    """
+    Sets the calculated signature for the given derived image and signer to that specified.
+    """
+    pass
+
+  @abstractmethod
+  def delete_derived_image(self, derived_image):
+    """
+    Deletes a derived image and all of its storage.
+    """
+    pass
+
+  @abstractmethod
+  def set_blob_size(self, blob, size):
+    """
+    Sets the size field on a blob to the value specified.
+    """
+    pass
+
+  @abstractmethod
+  def get_repo_blob_by_digest(self, namespace_name, repo_name, digest):
+    """
+    Returns the blob with the given digest under the matching repository or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def get_torrent_info(self, blob):
+    """
+    Returns the torrent information associated with the given blob or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def set_torrent_info(self, blob, piece_length, pieces):
+    """
+    Sets the torrent information associated with the given blob to that specified.
+    """
+    pass
+
+  @abstractmethod
+  def lookup_derived_image(self, repo_image, verb, varying_metadata=None):
+    """
+    Looks up the derived image for the given repository image, verb and optional varying metadata
+    and returns it or None if none.
+    """
+    pass
+
+  @abstractmethod
+  def lookup_or_create_derived_image(self, repo_image, verb, location, varying_metadata=None):
+    """
+    Looks up the derived image for the given repository image, verb and optional varying metadata
+    and returns it. If none exists, a new derived image is created.
+    """
+    pass
+
+  @abstractmethod
+  def get_tag_image(self, namespace_name, repo_name, tag_name):
+    """
+    Returns the image associated with the live tag with the given name under the matching
+    repository or None if none.
+    """
+    pass
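The value types above are plain namedtuples, so they can be constructed directly. A minimal sketch, assuming the new module is importable; the field values are invented:

from endpoints.verbs.models_interface import Blob, TorrentInfo

blob = Blob(uuid='abc-123', size=2048, uncompressed_size=4096,
            uploading=False, locations=['local_us'])
torrent = TorrentInfo(piece_length=512, pieces=b'\x00' * 20)
print(blob.size, torrent.piece_length)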
@@ -1,155 +1,16 @@
 import json

-from abc import ABCMeta, abstractmethod
-from collections import namedtuple
-
-from six import add_metaclass
-
 from data import model
 from image.docker.v1 import DockerV1Metadata
+from endpoints.verbs.models_interface import (
+  Blob,
+  DerivedImage,
+  ImageWithBlob,
+  Repository,
+  RepositoryReference,
+  TorrentInfo,
+  VerbsDataInterface,)


-class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
-                                           'is_public', 'kind'])):
-  """
-  Repository represents a namespaced collection of tags.
-  :type id: int
-  :type name: string
-  :type namespace_name: string
-  :type description: string
-  :type is_public: bool
-  :type kind: string
-  """
-
-
-class DerivedImage(namedtuple('DerivedImage', ['ref', 'blob', 'internal_source_image_db_id'])):
-  """
-  DerivedImage represents a user-facing alias for an image which was derived from another image.
-  """
-
-
-class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
-  """
-  RepositoryReference represents a reference to a Repository, without its full metadata.
-  """
-
-
-class ImageWithBlob(namedtuple('Image', ['image_id', 'blob', 'compat_metadata', 'repository',
-                                         'internal_db_id', 'v1_metadata'])):
-  """
-  ImageWithBlob represents a user-facing alias for referencing an image, along with its blob.
-  """
-
-
-class Blob(namedtuple('Blob', ['uuid', 'size', 'uncompressed_size', 'uploading', 'locations'])):
-  """
-  Blob represents an opaque binary blob saved to the storage system.
-  """
-
-
-class TorrentInfo(namedtuple('TorrentInfo', ['piece_length', 'pieces'])):
-  """
-  TorrentInfo represents the torrent piece information associated with a blob.
-  """
-
-
-@add_metaclass(ABCMeta)
-class VerbsDataInterface(object):
-  """
-  Interface that represents all data store interactions required by the registry's custom HTTP
-  verbs.
-  """
-  @abstractmethod
-  def get_repository(self, namespace_name, repo_name):
-    """
-    Returns a repository tuple for the repository with the given name under the given namespace.
-    Returns None if no such repository was found.
-    """
-    pass
-
-  [... the remaining removed abstract methods, get_manifest_layers_with_blobs through
-  get_tag_image, are identical to the interface bodies shown above, which now live in
-  endpoints/verbs/models_interface.py ...]


 class PreOCIModel(VerbsDataInterface):
@@ -166,13 +27,11 @@ class PreOCIModel(VerbsDataInterface):
     return _repository_for_repo(repo)

   def get_manifest_layers_with_blobs(self, repo_image):
-    repo_image_record = model.image.get_image_by_id(repo_image.repository.namespace_name,
-                                                    repo_image.repository.name,
-                                                    repo_image.image_id)
-
-    parents = model.image.get_parent_images_with_placements(repo_image.repository.namespace_name,
-                                                            repo_image.repository.name,
-                                                            repo_image_record)
+    repo_image_record = model.image.get_image_by_id(
+        repo_image.repository.namespace_name, repo_image.repository.name, repo_image.image_id)
+
+    parents = model.image.get_parent_images_with_placements(
+        repo_image.repository.namespace_name, repo_image.repository.name, repo_image_record)

     yield repo_image

@@ -190,8 +49,7 @@ class PreOCIModel(VerbsDataInterface):
         compat_metadata=metadata,
         v1_metadata=_docker_v1_metadata(repo_image.repository.namespace_name,
                                         repo_image.repository.name, parent),
-        internal_db_id=parent.id,
-      )
+        internal_db_id=parent.id,)

   def get_derived_image_signature(self, derived_image, signer_name):
     storage = model.storage.get_storage_by_uuid(derived_image.blob.uuid)

@@ -239,8 +97,7 @@ class PreOCIModel(VerbsDataInterface):
     return TorrentInfo(
         pieces=torrent_info.pieces,
-        piece_length=torrent_info.piece_length,
-    )
+        piece_length=torrent_info.piece_length,)

   def set_torrent_info(self, blob, piece_length, pieces):
     blob_record = model.storage.get_storage_by_uuid(blob.uuid)

@@ -277,12 +134,10 @@ class PreOCIModel(VerbsDataInterface):
         repository=RepositoryReference(
             namespace_name=namespace_name,
             name=repo_name,
-            id=found.repository_id,
-        ),
+            id=found.repository_id,),
         compat_metadata=metadata,
         v1_metadata=_docker_v1_metadata(namespace_name, repo_name, found),
-        internal_db_id=found.id,
-    )
+        internal_db_id=found.id,)


 pre_oci_model = PreOCIModel()

@@ -307,8 +162,7 @@ def _docker_v1_metadata(namespace_name, repo_name, repo_image):
       # Note: These are not needed in verbs and are expensive to load, so we just skip them.
       content_checksum=None,
-      parent_image_id=None,
-  )
+      parent_image_id=None,)


 def _derived_image(blob_record, repo_image):

@@ -318,8 +172,7 @@ def _derived_image(blob_record, repo_image):
   return DerivedImage(
       ref=repo_image.internal_db_id,
       blob=_blob(blob_record),
-      internal_source_image_db_id=repo_image.internal_db_id,
-  )
+      internal_source_image_db_id=repo_image.internal_db_id,)


 def _blob(blob_record):

@@ -336,8 +189,8 @@ def _blob(blob_record):
       size=blob_record.image_size,
       uncompressed_size=blob_record.uncompressed_size,
       uploading=blob_record.uploading,
-      locations=locations,
-  )
+      locations=locations,)


 def _repository_for_repo(repo):
   """ Returns a Repository object representing the Pre-OCI data model repo instance given. """

@@ -347,5 +200,4 @@ def _repository_for_repo(repo):
       namespace_name=repo.namespace_user.username,
       description=repo.description,
       is_public=model.repository.is_repository_public(repo),
-      kind=model.repository.get_repo_kind_name(repo),
-  )
+      kind=model.repository.get_repo_kind_name(repo),)
74 endpoints/verbs/test/test_security.py Normal file
@@ -0,0 +1,74 @@
import pytest

from flask import url_for
from endpoints.test.shared import conduct_call, gen_basic_auth
from test.fixtures import *

NO_ACCESS_USER = 'freshuser'
READ_ACCESS_USER = 'reader'
ADMIN_ACCESS_USER = 'devtable'
CREATOR_ACCESS_USER = 'creator'

PUBLIC_REPO = 'public/publicrepo'
PRIVATE_REPO = 'devtable/shared'
ORG_REPO = 'buynlarge/orgrepo'
ANOTHER_ORG_REPO = 'buynlarge/anotherorgrepo'

ACI_ARGS = {
  'server': 'someserver',
  'tag': 'fake',
  'os': 'linux',
  'arch': 'x64',}


@pytest.mark.parametrize('user', [
  (0, None),
  (1, NO_ACCESS_USER),
  (2, READ_ACCESS_USER),
  (3, CREATOR_ACCESS_USER),
  (4, ADMIN_ACCESS_USER),])
@pytest.mark.parametrize(
  'endpoint,method,repository,single_repo_path,params,expected_statuses',
  [
    ('get_aci_signature', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)),
    ('get_aci_signature', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
    ('get_aci_signature', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
    ('get_aci_signature', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)),

    # get_aci_image
    ('get_aci_image', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)),
    ('get_aci_image', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
    ('get_aci_image', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
    ('get_aci_image', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)),

    # get_squashed_tag
    ('get_squashed_tag', 'GET', PUBLIC_REPO, False, dict(tag='fake'), (404, 404, 404, 404, 404)),
    ('get_squashed_tag', 'GET', PRIVATE_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)),
    ('get_squashed_tag', 'GET', ORG_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)),
    ('get_squashed_tag', 'GET', ANOTHER_ORG_REPO, False, dict(tag='fake'), (403, 403, 403, 403,
                                                                            404)),

    # get_tag_torrent
    ('get_tag_torrent', 'GET', PUBLIC_REPO, True, dict(digest='sha256:1234'), (404, 404, 404, 404,
                                                                               404)),
    ('get_tag_torrent', 'GET', PRIVATE_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403,
                                                                                404)),
    ('get_tag_torrent', 'GET', ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403,
                                                                            404)),
    ('get_tag_torrent', 'GET', ANOTHER_ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 403,
                                                                                    403, 404)),])
def test_verbs_security(user, endpoint, method, repository, single_repo_path, params,
                        expected_statuses, app, client):
  headers = {}
  if user[1] is not None:
    headers['Authorization'] = gen_basic_auth(user[1], 'password')

  if single_repo_path:
    params['repository'] = repository
  else:
    (namespace, repo_name) = repository.split('/')
    params['namespace'] = namespace
    params['repository'] = repo_name

  conduct_call(client, 'verbs.' + endpoint, url_for, method, params,
               expected_code=expected_statuses[user[0]], headers=headers)
@@ -1,7 +1,7 @@
 import pytest

 from image.appc import DockerV1ToACIManifestTranslator
-from data.interfaces.verbs import RepositoryReference, ImageWithBlob
+from endpoints.verbs.models_interface import RepositoryReference, ImageWithBlob
 from util.dict_wrappers import JSONPathDict
@@ -7,8 +7,10 @@
     "dev": "./node_modules/.bin/karma start --browsers ChromeHeadless",
     "test": "./node_modules/.bin/karma start --single-run --browsers ChromeHeadless",
     "test:node": "JASMINE_CONFIG_PATH=static/test/jasmine.json ./node_modules/.bin/jasmine-ts './static/js/**/*.spec.ts'",
+    "e2e": "./node_modules/.bin/ts-node ./node_modules/.bin/protractor static/test/protractor.conf.ts",
     "build": "NODE_ENV=production ./node_modules/.bin/webpack --progress",
-    "watch": "./node_modules/.bin/webpack --watch"
+    "watch": "./node_modules/.bin/webpack --watch",
+    "lint": "./node_modules/.bin/tslint --type-check -p tsconfig.json -e **/*.spec.ts"
   },
   "repository": {
     "type": "git",

@@ -53,7 +55,6 @@
     "@types/react-dom": "0.14.17",
     "@types/showdown": "^1.4.32",
     "angular-mocks": "1.6.2",
-    "angular-ts-decorators": "0.0.19",
     "css-loader": "0.25.0",
     "html-loader": "^0.4.5",
     "jasmine-core": "^2.5.2",

@@ -65,13 +66,15 @@
     "karma-jasmine": "^0.3.8",
     "karma-webpack": "^1.8.1",
     "ngtemplate-loader": "^1.3.1",
+    "protractor": "^5.1.2",
     "script-loader": "^0.7.0",
     "source-map-loader": "0.1.5",
     "style-loader": "0.13.1",
     "ts-loader": "^0.9.5",
     "ts-mocks": "^0.2.2",
+    "ts-node": "^3.0.6",
+    "tslint": "^5.4.3",
     "typescript": "^2.2.1",
-    "typings": "1.4.0",
     "webpack": "^2.2"
   }
 }
@@ -80,7 +80,7 @@ RUN curl -L -o /usr/local/bin/prometheus-aggregator https://github.com/coreos/pr

 # Install front-end dependencies
 RUN ln -s /usr/bin/nodejs /usr/bin/node
-COPY static/ package.json tsconfig.json webpack.config.js typings.json yarn.lock ./
+COPY static/ package.json tsconfig.json webpack.config.js tslint.json yarn.lock ./
 RUN yarn install --ignore-engines
|
@ -7,5 +7,5 @@ export function Inject(value: string) {
|
||||||
return (target: any, propertyKey: string | symbol, parameterIndex: number): void => {
|
return (target: any, propertyKey: string | symbol, parameterIndex: number): void => {
|
||||||
target.$inject = target.$inject = [];
|
target.$inject = target.$inject = [];
|
||||||
target.$inject[parameterIndex] = value;
|
target.$inject[parameterIndex] = value;
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -38,4 +38,4 @@ export class QuayRequireDirective implements AfterContentInit {
|
||||||
this.$transclude
|
this.$transclude
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,7 +9,9 @@ import { Input, Component, Inject } from 'ng-metadata/core';
|
||||||
templateUrl: '/static/js/directives/ui/app-public-view/app-public-view.component.html'
|
templateUrl: '/static/js/directives/ui/app-public-view/app-public-view.component.html'
|
||||||
})
|
})
|
||||||
export class AppPublicViewComponent {
|
export class AppPublicViewComponent {
|
||||||
|
|
||||||
@Input('<') public repository: any;
|
@Input('<') public repository: any;
|
||||||
|
|
||||||
private settingsShown: number = 0;
|
private settingsShown: number = 0;
|
||||||
private logsShown: number = 0;
|
private logsShown: number = 0;
|
||||||
|
|
||||||
|
@ -17,11 +19,6 @@ export class AppPublicViewComponent {
|
||||||
this.updateDescription = this.updateDescription.bind(this);
|
this.updateDescription = this.updateDescription.bind(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
private updateDescription(content: string) {
|
|
||||||
this.repository.description = content;
|
|
||||||
this.repository.put();
|
|
||||||
}
|
|
||||||
|
|
||||||
public showSettings(): void {
|
public showSettings(): void {
|
||||||
this.settingsShown++;
|
this.settingsShown++;
|
||||||
}
|
}
|
||||||
|
@ -29,4 +26,9 @@ export class AppPublicViewComponent {
|
||||||
public showLogs(): void {
|
public showLogs(): void {
|
||||||
this.logsShown++;
|
this.logsShown++;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private updateDescription(content: string) {
|
||||||
|
this.repository.description = content;
|
||||||
|
this.repository.put();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -44,4 +44,4 @@ export class ChannelIconComponent {
     var num: number = parseInt(hash.substr(0, 4));
     return this.colors[num % this.colors.length];
   }
 }
@@ -60,4 +60,4 @@ export class ClipboardCopyDirective implements AfterContentInit, OnDestroy {
       this.clipboard.destroy();
     }
   }
 }
@@ -12,11 +12,12 @@ describe("CorTabPaneComponent", () => {

   beforeEach(() => {
     activeTab = new BehaviorSubject<string>(null);
-    spyOn(activeTab, "subscribe").and.returnValue(null);
+    spyOn(activeTab, "subscribe").and.callThrough();
     panelMock = new Mock<CorTabPanelComponent>();
     panelMock.setup(mock => mock.activeTab).is(activeTab);

     component = new CorTabPaneComponent(panelMock.Object);
+    component.id = 'description';
   });

   describe("ngOnInit", () => {

@@ -36,5 +37,27 @@ describe("CorTabPaneComponent", () => {

       expect((<Spy>panelMock.Object.activeTab.subscribe)).toHaveBeenCalled();
     });
+
+    it("does nothing if active tab ID is undefined", () => {
+      component.ngOnInit();
+      component.isActiveTab = true;
+      panelMock.Object.activeTab.next(null);
+
+      expect(component.isActiveTab).toEqual(true);
+    });
+
+    it("sets self as active if active tab ID matches tab ID", () => {
+      component.ngOnInit();
+      panelMock.Object.activeTab.next(component.id);
+
+      expect(component.isActiveTab).toEqual(true);
+    });
+
+    it("sets self as inactive if active tab ID does not match tab ID", () => {
+      component.ngOnInit();
+      panelMock.Object.activeTab.next(component.id.split('').reverse().join(''));
+
+      expect(component.isActiveTab).toEqual(false);
+    });
   });
 });
@@ -1,5 +1,6 @@
 import { Component, Input, Inject, Host, OnInit } from 'ng-metadata/core';
 import { CorTabPanelComponent } from '../cor-tab-panel/cor-tab-panel.component';
+import 'rxjs/add/operator/filter';


 /**

@@ -16,7 +17,7 @@ export class CorTabPaneComponent implements OnInit {

   @Input('@') public id: string;

-  private isActiveTab: boolean = false;
+  public isActiveTab: boolean = false;

   constructor(@Host() @Inject(CorTabPanelComponent) private panel: CorTabPanelComponent) {

@@ -25,8 +26,10 @@ export class CorTabPaneComponent implements OnInit {
   public ngOnInit(): void {
     this.panel.addTabPane(this);

-    this.panel.activeTab.subscribe((tabId: string) => {
-      this.isActiveTab = (this.id === tabId);
-    });
+    this.panel.activeTab
+      .filter(tabId => tabId != undefined)
+      .subscribe((tabId: string) => {
+        this.isActiveTab = (this.id === tabId);
+      });
   }
 }
@@ -12,9 +12,16 @@ describe("CorTabPanelComponent", () => {
   });

   describe("ngOnInit", () => {
+    var tabs: CorTabComponent[] = [];

     beforeEach(() => {
-      spyOn(component.activeTab, "subscribe").and.returnValue(null);
+      // Add tabs to panel
+      tabs.push(new CorTabComponent(component));
+      tabs[0].tabId = "info";
+      tabs.forEach((tab) => component.addTab(tab));
+
+      spyOn(component.activeTab, "subscribe").and.callThrough();
+      spyOn(component.activeTab, "next").and.callThrough();
       spyOn(component.tabChange, "emit").and.returnValue(null);
     });

@@ -24,12 +31,26 @@ describe("CorTabPanelComponent", () => {
       expect(<Spy>component.activeTab.subscribe).toHaveBeenCalled();
     });

-    it("emits output event for tab change when ", () => {
+    it("emits next active tab with tab ID of first registered tab if given tab ID is null", () => {
+      component.ngOnInit();
+      component.activeTab.next(null);
+
+      expect((<Spy>component.activeTab.next).calls.argsFor(1)[0]).toEqual(tabs[0].tabId);
+    });
+
+    it("does not emit output event for tab change if tab ID is null", () => {
+      component.ngOnInit();
+      component.activeTab.next(null);
+
+      expect((<Spy>component.tabChange.emit).calls.allArgs).not.toContain(null);
+    });
+
+    it("emits output event for tab change when tab ID is not null", () => {
       component.ngOnInit();
       const tabId: string = "description";
-      (<Spy>component.activeTab.subscribe).calls.argsFor(0)[0](tabId);
+      component.activeTab.next(tabId);

-      expect((<Spy>component.tabChange.emit).calls.argsFor(0)[0]).toEqual(tabId);
+      expect((<Spy>component.tabChange.emit).calls.argsFor(1)[0]).toEqual(tabId);
     });
   });
 });
@@ -28,7 +28,12 @@ export class CorTabPanelComponent implements OnInit, OnChanges {

   public ngOnInit(): void {
     this.activeTab.subscribe((tabId: string) => {
-      this.tabChange.emit(tabId);
+      // Catch null values and replace with tabId of first tab
+      if (!tabId && this.tabs[0]) {
+        this.activeTab.next(this.tabs[0].tabId);
+      } else {
+        this.tabChange.emit(tabId);
+      }
     });
   }
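The null handling above, together with the `.filter(tabId => tabId != undefined)` chains added to the tab and pane components, works around a property of BehaviorSubject: it replays its latest value to every new subscriber, so the `null` the subject is seeded with would otherwise fire into each subscription before any tab is selected. A minimal standalone sketch of that behavior, assuming the same RxJS 5 patch-style operators the diff imports:

```typescript
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import 'rxjs/add/operator/filter';

// A BehaviorSubject replays its current value on subscribe, so the seed
// value (null here) reaches the subscriber immediately unless filtered out.
const activeTab = new BehaviorSubject<string>(null);

activeTab
  .filter(tabId => tabId != undefined)  // loose != drops both null and undefined
  .subscribe(tabId => console.log(`active tab: ${tabId}`));

activeTab.next(null);           // still ignored by the filter
activeTab.next('description');  // logs "active tab: description"
```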
@@ -12,7 +12,7 @@ describe("CorTabComponent", () => {

   beforeEach(() => {
     activeTab = new BehaviorSubject<string>(null);
-    spyOn(activeTab, "subscribe").and.returnValue(null);
+    spyOn(activeTab, "subscribe").and.callThrough();
     panelMock = new Mock<CorTabPanelComponent>();
     panelMock.setup(mock => mock.activeTab).is(activeTab);

@@ -35,16 +35,25 @@ describe("CorTabComponent", () => {
     expect((<Spy>panelMock.Object.activeTab.subscribe)).toHaveBeenCalled();
   });

+  it("does nothing if active tab ID is undefined", () => {
+    component.ngOnInit();
+    panelMock.Object.activeTab.next(null);
+
+    expect(<Spy>component.tabInit.emit).not.toHaveBeenCalled();
+    expect(<Spy>component.tabShow.emit).not.toHaveBeenCalled();
+    expect(<Spy>component.tabHide.emit).not.toHaveBeenCalled();
+  });
+
   it("emits output event for tab init if it is new active tab", () => {
     component.ngOnInit();
-    (<Spy>panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId);
+    panelMock.Object.activeTab.next(component.tabId);

     expect(<Spy>component.tabInit.emit).toHaveBeenCalled();
   });

   it("emits output event for tab show if it is new active tab", () => {
     component.ngOnInit();
-    (<Spy>panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId);
+    panelMock.Object.activeTab.next(component.tabId);

     expect(<Spy>component.tabShow.emit).toHaveBeenCalled();
   });

@@ -53,8 +62,8 @@ describe("CorTabComponent", () => {
     const newTabId: string = component.tabId.split('').reverse().join('');
     component.ngOnInit();
     // Call twice, first time to set 'isActive' to true
-    (<Spy>panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId);
-    (<Spy>panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](newTabId);
+    panelMock.Object.activeTab.next(component.tabId);
+    panelMock.Object.activeTab.next(newTabId);

     expect(<Spy>component.tabHide.emit).toHaveBeenCalled();
   });

@@ -62,7 +71,7 @@ describe("CorTabComponent", () => {
   it("does not emit output event for tab hide if was not previously active tab", () => {
     const newTabId: string = component.tabId.split('').reverse().join('');
     component.ngOnInit();
-    (<Spy>panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](newTabId);
+    panelMock.Object.activeTab.next(newTabId);

     expect(<Spy>component.tabHide.emit).not.toHaveBeenCalled();
   });
@@ -1,5 +1,6 @@
 import { Component, Input, Output, Inject, EventEmitter, Host, OnInit } from 'ng-metadata/core';
 import { CorTabPanelComponent } from '../cor-tab-panel/cor-tab-panel.component';
+import 'rxjs/add/operator/filter';


 /**

@@ -28,16 +29,18 @@ export class CorTabComponent implements OnInit {
   }

   public ngOnInit(): void {
-    this.panel.activeTab.subscribe((tabId: string) => {
-      if (!this.isActive && this.tabId === tabId) {
-        this.isActive = true;
-        this.tabInit.emit({});
-        this.tabShow.emit({});
-      } else if (this.isActive && this.tabId !== tabId) {
-        this.isActive = false;
-        this.tabHide.emit({});
-      }
-    });
+    this.panel.activeTab
+      .filter(tabId => tabId != undefined)
+      .subscribe((tabId: string) => {
+        if (!this.isActive && this.tabId === tabId) {
+          this.isActive = true;
+          this.tabInit.emit({});
+          this.tabShow.emit({});
+        } else if (this.isActive && this.tabId !== tabId) {
+          this.isActive = false;
+          this.tabHide.emit({});
+        }
+      });

     this.panel.addTab(this);
   }
@@ -1,4 +1,4 @@
-import { NgModule } from 'ng-metadata/core'
+import { NgModule } from 'ng-metadata/core';
 import { CorTabsComponent } from './cor-tabs.component';
 import { CorTabComponent } from './cor-tab/cor-tab.component';
 import { CorNavTabsDirective } from './cor-nav-tabs/cor-nav-tabs.directive';
13 static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts Normal file
@@ -0,0 +1,13 @@
import { element, by, browser, $, ElementFinder, ExpectedConditions as until } from 'protractor';


export class CorTabsViewObject {

  public selectTabByTitle(title: string): Promise<void> {
    return Promise.resolve($(`cor-tab[tab-title="${title}"] a`).click());
  }

  public isActiveTab(title: string): Promise<boolean> {
    return Promise.resolve($(`cor-tab[tab-title="${title}"] .cor-tab-itself.active`).isPresent());
  }
}
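A view object like this keeps DOM selectors out of the specs themselves. A hypothetical Protractor spec using it is sketched below; the URL, tab title, and relative import path are illustrative, not taken from the repo:

```typescript
import { browser } from 'protractor';
import { CorTabsViewObject } from './cor-tabs.view-object';

describe('repository view tabs', () => {
  const tabs: CorTabsViewObject = new CorTabsViewObject();

  it('marks a tab active once it is selected', () => {
    browser.get('/repository/devtable/simple');

    // Interactions go through the view object's promises, not raw selectors.
    tabs.selectTabByTitle('Settings')
      .then(() => tabs.isActiveTab('Settings'))
      .then((isActive) => expect(isActive).toBe(true));
  });
});
```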
@@ -1,6 +1,7 @@
-import { Input, Output, Component, Inject } from 'ng-metadata/core';
+import { Input, Component, Inject } from 'ng-metadata/core';
 import * as moment from "moment";
+

 /**
  * A component that allows for selecting a time duration.
  */

@@ -9,6 +10,7 @@ import * as moment from "moment";
   templateUrl: '/static/js/directives/ui/duration-input/duration-input.component.html'
 })
 export class DurationInputComponent implements ng.IComponentController {
+
   @Input('<') public min: string;
   @Input('<') public max: string;
   @Input('=?') public value: string;

@@ -17,7 +19,7 @@ export class DurationInputComponent implements ng.IComponentController {
   private min_s: number;
   private max_s: number;

-  constructor (@Inject('$scope') private $scope: ng.IScope) {
+  constructor(@Inject('$scope') private $scope: ng.IScope) {

   }

@@ -33,7 +35,7 @@ export class DurationInputComponent implements ng.IComponentController {
   }

   private updateValue(): void {
-    this.value = this.seconds + 's';
+    this.value = `${this.seconds}s`;
   }

   private refresh(): void {

@@ -41,8 +43,8 @@ export class DurationInputComponent implements ng.IComponentController {
     this.max_s = this.toSeconds(this.max || '1h');

     if (this.value) {
-      this.seconds = this.toSeconds(this.value || '0s')
-    };
+      this.seconds = this.toSeconds(this.value || '0s');
+    }
   }

   private durationExplanation(durationSeconds: string): string {
@@ -75,4 +75,4 @@ export class LinearWorkflowComponent {
 export type SectionInfo = {
   index: number;
   component: LinearWorkflowSectionComponent;
-}
+};
@@ -17,7 +17,9 @@ export class ManageTriggerComponent implements OnChanges {
   @Input('<') public githost: string = 'custom-git';
   @Input('<') public repository: Repository;
   @Input('<') public trigger: Trigger;
+
   @Output() public activateTrigger: EventEmitter<{config: TriggerConfig, pull_robot?: any}> = new EventEmitter();
+
   public config: TriggerConfig;
   public local: Local = {
     selectedRepository: {name: ''},

@@ -28,6 +30,7 @@ export class ManageTriggerComponent implements OnChanges {
     repositoryOptions: {filter: '', predicate: 'score', reverse: false, page: 0, hideStale: true},
     robotOptions: {filter: '', predicate: 'score', reverse: false, page: 0},
   };
+
   private namespacesPerPage: number = 10;
   private repositoriesPerPage: number = 10;
   private robotsPerPage: number = 10;

@@ -174,7 +177,7 @@ export class ManageTriggerComponent implements OnChanges {
   }

   private setPossibleContexts(path: string) {
-    if (this.local.dockerfileLocations.contextMap){
+    if (this.local.dockerfileLocations.contextMap) {
       this.local.contexts = this.local.dockerfileLocations.contextMap[path] || [];
     } else {
       this.local.contexts = [path.split('/').slice(0, -1).join('/').concat('/')];

@@ -288,7 +291,7 @@ export class ManageTriggerComponent implements OnChanges {
     const kind = ref.kind == 'branch' ? 'heads' : 'tags';
     const icon = ref.kind == 'branch' ? 'fa-code-fork' : 'fa-tag';
     return {
-      'value': kind + '/' + ref.name,
+      'value': `${kind}/${ref.name}`,
       'icon': icon,
       'title': ref.name
     };
@@ -0,0 +1,65 @@
import { element, by, browser, $, ElementFinder, ExpectedConditions as until } from 'protractor';


export class ManageTriggerViewObject {

  public sections: {[name: string]: ElementFinder} = {
    namespace: $('linear-workflow-section[section-id=namespace]'),
    githostrepo: $('linear-workflow-section[section-id=repo][section-title="Select Repository"]'),
    customrepo: $('linear-workflow-section[section-id=repo][section-title="Git Repository"]'),
    triggeroptions: $('linear-workflow-section[section-id=triggeroptions]'),
    dockerfilelocation: $('linear-workflow-section[section-id=dockerfilelocation]'),
    contextlocation: $('linear-workflow-section[section-id=contextlocation]'),
    robot: $('linear-workflow-section[section-id=robot]'),
    verification: $('linear-workflow-section[section-id=verification]'),
  };

  private customGitRepoInput: ElementFinder = element(by.model('$ctrl.buildSource'));
  private dockerfileLocationInput: ElementFinder = this.sections['dockerfilelocation'].$('input');
  private dockerfileLocationDropdownButton: ElementFinder = this.sections['dockerfilelocation']
    .$('button[data-toggle=dropdown]');
  private dockerContextInput: ElementFinder = this.sections['contextlocation'].$('input');
  private dockerContextDropdownButton: ElementFinder = this.sections['contextlocation']
    .$('button[data-toggle=dropdown]');
  private robotAccountOptions: ElementFinder = this.sections['robot']
    .element(by.repeater('$ctrl.orderedData.visibleEntries'));

  public continue(): Promise<void> {
    return Promise.resolve(element(by.buttonText('Continue')).click());
  }

  public enterRepositoryURL(url: string): Promise<void> {
    browser.wait(until.presenceOf(this.customGitRepoInput));
    this.customGitRepoInput.clear();

    return Promise.resolve(this.customGitRepoInput.sendKeys(url));
  }

  public enterDockerfileLocation(path: string): Promise<void> {
    browser.wait(until.presenceOf(this.dockerfileLocationInput));
    this.dockerfileLocationInput.clear();

    return Promise.resolve(this.dockerfileLocationInput.sendKeys(path));
  }

  public getDockerfileSuggestions(): Promise<string[]> {
    return Promise.resolve(this.dockerfileLocationDropdownButton.click())
      .then(() => element.all(by.repeater('$ctrl.paths')).map(result => result.getText()));
  }

  public enterDockerContext(path: string): Promise<void> {
    browser.wait(until.presenceOf(this.dockerContextInput));
    this.dockerContextInput.clear();

    return Promise.resolve(this.dockerContextInput.sendKeys(path));
  }

  public getDockerContextSuggestions(): Promise<string[]> {
    return Promise.resolve(this.dockerContextDropdownButton.click())
      .then(() => element.all(by.repeater('$ctrl.contexts')).map(result => result.getText()));
  }

  public selectRobotAccount(index: number): Promise<void> {
    return Promise.resolve(element.all(by.css('input[type=radio]')).get(index).click());
  }
}
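The same page-object idea scales to the multi-section trigger wizard: each method wraps one interaction, so a spec reads as the workflow it exercises. A hypothetical flow is sketched below; the URL and git values are illustrative only:

```typescript
import { browser } from 'protractor';
import { ManageTriggerViewObject } from './manage-trigger.view-object';

describe('manage trigger workflow', () => {
  const wizard: ManageTriggerViewObject = new ManageTriggerViewObject();

  it('walks the custom-git sections in order', () => {
    browser.get('/repository/devtable/simple?tab=builds');

    // Each continue() advances the linear-workflow to its next section.
    wizard.enterRepositoryURL('git@example.com:devtable/simple.git')
      .then(() => wizard.continue())
      .then(() => wizard.enterDockerfileLocation('/Dockerfile'))
      .then(() => wizard.continue());
  });
});
```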
@@ -16,8 +16,10 @@ export class MarkdownEditorComponent {
   @Input('<') public content: string;
   @Output() public save: EventEmitter<{editedContent: string}> = new EventEmitter();
   @Output() public discard: EventEmitter<any> = new EventEmitter();
+
   // Textarea is public for testability, should not be directly accessed
   @ViewChild('#markdown-textarea') public textarea: ng.IAugmentedJQuery;
+
   private editMode: EditMode = "write";

   constructor(@Inject('$document') private $document: ng.IDocumentService,

@@ -115,9 +117,9 @@ export class MarkdownEditorComponent {
   private insertText(text: string, startPos: number, endPos: number): void {
     if (this.browserPlatform === 'firefox') {
       // FIXME: Ctrl-Z highlights previous text
-      this.textarea.val(this.textarea.val().substr(0, startPos) +
+      this.textarea.val(<string>this.textarea.val().substr(0, startPos) +
                         text +
-                        this.textarea.val().substr(endPos, this.textarea.val().length));
+                        <string>this.textarea.val().substr(endPos, this.textarea.val().length));
     }
     else {
       // TODO: Test other platforms (IE...)
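The `<string>` casts added above suggest the jQuery typings in play no longer declare `.val()` as a plain string, so the `.substr()` calls stop type-checking without narrowing. A sketch that narrows once instead of casting at each call site (the helper name is illustrative):

```typescript
import * as $ from 'jquery';

// Read .val() once and narrow it once, then splice the new text in; this is
// equivalent to the casts in the diff but keeps a single point of narrowing.
function spliceText(textarea: JQuery, text: string, startPos: number, endPos: number): void {
  const current: string = <string>textarea.val();
  textarea.val(current.substr(0, startPos) + text + current.substr(endPos, current.length));
}
```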
@@ -1,6 +1,7 @@
 import { Input, Component, Inject } from 'ng-metadata/core';
 import { Repository } from '../../../types/common.types';
+

 /**
  * A component that displays the configuration and options for repository signing.
  */

@@ -9,12 +10,13 @@ import { Repository } from '../../../types/common.types';
   templateUrl: '/static/js/directives/ui/repository-signing-config/repository-signing-config.component.html',
 })
 export class RepositorySigningConfigComponent {
+
   @Input('<') public repository: Repository;
+
   private enableTrustInfo: {[key: string]: string} = null;
   private disableTrustInfo: {[key: string]: string} = null;

-  constructor (@Inject("ApiService") private ApiService: any) {
+  constructor(@Inject("ApiService") private ApiService: any) {

   }

@@ -41,4 +43,4 @@ export class RepositorySigningConfigComponent {
       callback(true);
     }, errorDisplay);
   }
 }
@@ -43,7 +43,7 @@ export class SearchBoxComponent {
   private onSelected($event): void {
     this.autocompleteSelected = true;
     this.$timeout(() => {
-      this.$location.url($event['result']['href'])
+      this.$location.url($event['result']['href']);
     }, 100);
   }

@@ -54,4 +54,4 @@ export class SearchBoxComponent {
       this.$location.search('q', $event['value']);
     }, 10);
   }
 }
@@ -2,6 +2,7 @@ import { Input, Component, Inject } from 'ng-metadata/core';
 import { ApostilleDelegationsSet, ApostilleSignatureDocument, ApostilleTagDocument } from '../../../types/common.types';
 import * as moment from "moment";
+

 type TagSigningInfo = {
   delegations: DelegationInfo[];
   delegationsByName: {[delegationName: string]: DelegationInfo};

@@ -9,7 +10,8 @@ type TagSigningInfo = {
   hasExpiringSoon: boolean;
   hasExpired: boolean;
   hasInvalid: boolean;
-}
+};

 type DelegationInfo = {
   delegationName: string;

@@ -20,7 +22,9 @@ type DelegationInfo = {
   isExpiringSoon: boolean
 };

-var RELEASES = ['targets/releases', 'targets'];
+
+const RELEASES = ['targets/releases', 'targets'];
+

 /**
  * A component that displays the signing status of a tag in the repository view.

@@ -30,13 +34,16 @@ const RELEASES = ['targets/releases', 'targets'];
   templateUrl: '/static/js/directives/ui/tag-signing-display/tag-signing-display.component.html',
 })
 export class TagSigningDisplayComponent {
+
   @Input('<') public compact: boolean;
   @Input('<') public tag: any;
   @Input('<') public delegations: ApostilleDelegationsSet;
+
   private cachedSigningInfo: TagSigningInfo | null = null;

-  constructor(@Inject("$sanitize") private $sanitize: ng.sanitize.ISanitizeService) {}
+  constructor(@Inject("$sanitize") private $sanitize: ng.sanitize.ISanitizeService) {
+
+  }

   private base64ToHex(base64String: string): string {
     // Based on: http://stackoverflow.com/questions/39460182/decode-base64-to-hexadecimal-string-with-javascript

@@ -49,13 +56,15 @@ export class TagSigningDisplayComponent {
     var hexString = '';
     for (var i = 0; i < raw.length; ++i) {
       var char = raw.charCodeAt(i);
-      var hex = char.toString(16)
+      var hex = char.toString(16);
       hexString += (hex.length == 2 ? hex : '0' + hex);
     }
     return hexString;
   }

-  private buildDelegationInfo(tag: any, delegationName: string, delegation: ApostilleSignatureDocument): DelegationInfo {
+  private buildDelegationInfo(tag: any,
+                              delegationName: string,
+                              delegation: ApostilleSignatureDocument): DelegationInfo {
     var digest_without_prefix = tag.manifest_digest.substr('sha256:'.length);
     var hex_signature = this.base64ToHex(delegation.targets[tag.name].hashes['sha256']);

@@ -70,7 +79,7 @@ export class TagSigningDisplayComponent {
       'delegationHash': hex_signature,
       'isExpired': expires.isSameOrBefore(now),
       'isExpiringSoon': !expires.isSameOrBefore(now) && expires.isSameOrBefore(withOneWeek),
-    }
+    };
   }

   private buildTagSigningInfo(tag: any, delegationSet: ApostilleDelegationsSet): TagSigningInfo {

@@ -80,13 +89,13 @@ export class TagSigningDisplayComponent {
       'hasExpired': false,
       'hasExpiringSoon': false,
       'hasInvalid': false,
-    }
+    };

     // Find all delegations containing the tag as a target.
     Object.keys(delegationSet.delegations).forEach((delegationName) => {
       var delegation = delegationSet.delegations[delegationName];
       if (delegation.targets[tag.name]) {
-        var DelegationInfo = this.buildDelegationInfo(tag, delegationName, delegation)
+        var DelegationInfo = this.buildDelegationInfo(tag, delegationName, delegation);
         info.delegations.push(DelegationInfo);
         info.delegationsByName[delegationName] = DelegationInfo;

@@ -173,4 +182,4 @@ export class TagSigningDisplayComponent {

     return 'invalid-signed';
   }
 }
@@ -1,6 +1,7 @@
 import { Input, Component, Inject } from 'ng-metadata/core';
 import * as moment from "moment";
+

 /**
  * A component that displays settings for a namespace for time machine.
  */

@@ -9,6 +10,7 @@ import * as moment from "moment";
   templateUrl: '/static/js/directives/ui/time-machine-settings/time-machine-settings.component.html'
 })
 export class TimeMachineSettingsComponent implements ng.IComponentController {
+
   @Input('<') public user: any;
   @Input('<') public organization: any;

@@ -16,7 +18,7 @@ export class TimeMachineSettingsComponent implements ng.IComponentController {
   private current_s: number;
   private updating: boolean;

-  constructor (@Inject('Config') private Config: any, @Inject('ApiService') private ApiService: any,
+  constructor(@Inject('Config') private Config: any, @Inject('ApiService') private ApiService: any,
               @Inject('Features') private Features: any) {
     this.current_s = 0;
     this.initial_s = 0;

@@ -51,7 +53,7 @@ export class TimeMachineSettingsComponent implements ng.IComponentController {
     this.updating = true;
     var errorDisplay = this.ApiService.errorDisplay('Could not update time machine setting', () => {
       this.updating = false;
-    })
+    });

     var method = (this.user ? this.ApiService.changeUserDetails :
                   this.ApiService.changeOrganizationDetails);
@@ -1,6 +1,7 @@
 import { Input, Output, Directive, Inject, AfterContentInit, EventEmitter, HostListener } from 'ng-metadata/core';
 import * as $ from 'jquery';
 
+
 /**
  * Directive which decorates an <input> with a typeahead autocomplete.
  */
@@ -8,15 +9,15 @@ import * as $ from 'jquery';
   selector: '[typeahead]',
 })
 export class TypeaheadDirective implements AfterContentInit {
-  @Output('typeahead') typeahead = new EventEmitter<any>();
 
-  @Input('taDisplayKey') displayKey: string = '';
-  @Input('taSuggestionTmpl') suggestionTemplate: string = '';
-  @Input('taClearOnSelect') clearOnSelect: boolean = false;
-  @Input('taDebounce') debounce: number = 250;
+  @Input('taDisplayKey') public displayKey: string = '';
+  @Input('taSuggestionTmpl') public suggestionTemplate: string = '';
+  @Input('taClearOnSelect') public clearOnSelect: boolean = false;
+  @Input('taDebounce') public debounce: number = 250;
 
-  @Output('taSelected') selected = new EventEmitter<any>();
-  @Output('taEntered') entered = new EventEmitter<any>();
+  @Output('typeahead') public typeahead = new EventEmitter<any>();
+  @Output('taSelected') public selected = new EventEmitter<any>();
+  @Output('taEntered') public entered = new EventEmitter<any>();
 
   private itemSelected: boolean = false;
   private existingTimer: ng.IPromise<void> = null;
@@ -28,10 +29,25 @@ export class TypeaheadDirective implements AfterContentInit {
               @Inject('$timeout') private $timeout: ng.ITimeoutService) {
   }
 
+  @HostListener('keyup', ['$event'])
+  public onKeyup(event: JQueryKeyEventObject): void {
+    if (!this.itemSelected && event.keyCode == 13) {
+      this.entered.emit({
+        'value': $(this.$element).typeahead('val'),
+        'callback': (reset: boolean) => {
+          if (reset) {
+            this.itemSelected = false;
+            $(this.$element).typeahead('val', '');
+          }
+        }
+      });
+    }
+  }
+
   public ngAfterContentInit(): void {
     var templates = null;
     if (this.suggestionTemplate) {
-      templates = {}
+      templates = {};
 
       if (this.suggestionTemplate) {
         templates['suggestion'] = this.buildTemplateHandler(this.suggestionTemplate);
@@ -42,7 +58,7 @@ export class TypeaheadDirective implements AfterContentInit {
       if (this.clearOnSelect) {
         $(this.$element).typeahead('val', '');
       }
-      this.selected.emit({'result': suggestion})
+      this.selected.emit({'result': suggestion});
       this.itemSelected = true;
     });
 
@@ -72,21 +88,6 @@ export class TypeaheadDirective implements AfterContentInit {
     }, this.debounce);
   }
 
-  @HostListener('keyup', ['$event'])
-  public onKeyup(event: JQueryKeyEventObject): void {
-    if (!this.itemSelected && event.keyCode == 13) {
-      this.entered.emit({
-        'value': $(this.$element).typeahead('val'),
-        'callback': (reset: boolean) => {
-          if (reset) {
-            this.itemSelected = false;
-            $(this.$element).typeahead('val', '');
-          }
-        }
-      });
-    }
-  }
-
   private buildTemplateHandler(templateUrl: string): Function {
     return (value) => {
       var resultDiv = document.createElement('div');
@@ -101,4 +102,4 @@ export class TypeaheadDirective implements AfterContentInit {
       return resultDiv;
     };
   }
-}
\ No newline at end of file
+}

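Editor's note (not part of the commit): the reordered outputs above define the directive's full event surface. A minimal consumer sketch follows; the payload shapes are taken directly from the handler code in this diff ((taSelected) emits {result}, (taEntered) emits {value, callback}), while the controller name is hypothetical.

// Sketch only: a host controller wired to the typeahead directive's outputs.
// SearchBoxController is illustrative; payload shapes come from the diff above.
export class SearchBoxController {
  // Fired when a suggestion is chosen; mirrors this.selected.emit({'result': suggestion}).
  public onSelected(event: {result: any}): void {
    console.log('picked suggestion', event.result);
  }

  // Fired on Enter with no selection; mirrors the onKeyup handler's entered.emit(...).
  public onEntered(event: {value: string, callback: (reset: boolean) => void}): void {
    console.log('entered value', event.value);
    event.callback(true); // reset the input, as the directive's callback allows
  }
}
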
@@ -10,9 +10,6 @@ import { Input, Component } from 'ng-metadata/core';
   templateUrl: '/static/js/directives/ui/visibility-indicator/visibility-indicator.component.html'
 })
 export class VisibilityIndicatorComponent {
 
   @Input('<') public repository: any;
-
-  constructor() {
-
-  }
 }

@@ -84,7 +84,8 @@ function provideConfig($provide: ng.auto.IProvideService,
   var tooltipFactory: any = $tooltipProvider.$get[$tooltipProvider.$get.length - 1];
   $tooltipProvider.$get[$tooltipProvider.$get.length - 1] = function($window: ng.IWindowService) {
     if ('ontouchstart' in $window) {
-      var existing: any = tooltipFactory.apply(this, arguments);
+      const existing: any = tooltipFactory.apply(this, arguments);
+
       return function(element) {
         // Note: We only disable bs-tooltip's themselves. $tooltip is used for other things
         // (such as the datepicker), so we need to be specific when canceling it.

@@ -45,7 +45,8 @@ export function provideRun($rootScope: QuayRunScope,
       return true;
     }
 
-    const invalid_token: boolean = response.data['title'] == 'invalid_token' || response.data['error_type'] == 'invalid_token';
+    const invalid_token: boolean = response.data['title'] == 'invalid_token' ||
+                                   response.data['error_type'] == 'invalid_token';
     if (response !== undefined &&
         response.status == 401 &&
         invalid_token &&
@@ -92,7 +93,7 @@ export function provideRun($rootScope: QuayRunScope,
     }
   });
 
-  $rootScope.$on('$routeChangeSuccess', function (event, current, previous) {
+  $rootScope.$on('$routeChangeSuccess', function(event, current, previous) {
     $rootScope.current = current.$$route;
     $rootScope.currentPage = current;
     $rootScope.pageClass = '';
@@ -126,7 +127,7 @@ interface QuayRunScope extends ng.IRootScopeService {
   currentPage: any;
   current: any;
   title: any;
-  description: string,
+  description: string;
   pageClass: any;
   newLayout: any;
   fixFooter: any;

@@ -13,7 +13,9 @@ import { CorTableComponent } from './directives/ui/cor-table/cor-table.component
 import { CorTableColumn } from './directives/ui/cor-table/cor-table-col.component';
 import { ChannelIconComponent } from './directives/ui/channel-icon/channel-icon.component';
 import { TagSigningDisplayComponent } from './directives/ui/tag-signing-display/tag-signing-display.component';
-import { RepositorySigningConfigComponent } from './directives/ui/repository-signing-config/repository-signing-config.component';
+import {
+  RepositorySigningConfigComponent
+} from './directives/ui/repository-signing-config/repository-signing-config.component';
 import { TimeMachineSettingsComponent } from './directives/ui/time-machine-settings/time-machine-settings.component';
 import { DurationInputComponent } from './directives/ui/duration-input/duration-input.component';
 import { SearchBoxComponent } from './directives/ui/search-box/search-box.component';
@@ -22,7 +24,6 @@ import { BuildServiceImpl } from './services/build/build.service.impl';
 import { AvatarServiceImpl } from './services/avatar/avatar.service.impl';
 import { DockerfileServiceImpl } from './services/dockerfile/dockerfile.service.impl';
 import { DataFileServiceImpl } from './services/datafile/datafile.service.impl';
-import { UtilServiceImpl } from './services/util/util.service.impl';
 import { QuayRequireDirective } from './directives/structural/quay-require/quay-require.directive';
 import { MarkdownInputComponent } from './directives/ui/markdown/markdown-input.component';
 import { MarkdownViewComponent } from './directives/ui/markdown/markdown-view.component';

@@ -47,4 +47,4 @@ export class AvatarServiceImpl implements AvatarService {
 
     return this.cache[cacheKey] = hash;
   }
-}
\ No newline at end of file
+}

@@ -19,4 +19,4 @@ export abstract class AvatarService {
    * @return hash The hash for the avatar image.
    */
   public abstract computeHash(email?: string, name?: string): string;
-}
\ No newline at end of file
+}

@@ -73,7 +73,8 @@ export class BuildServiceImpl implements BuildService {
         break;
 
       case 'internalerror':
-        message = 'An internal system error occurred while building; the build will be retried in the next few minutes.';
+        message = 'An internal system error occurred while building; ' +
+                  'the build will be retried in the next few minutes.';
         break;
 
       case 'cancelled':
@@ -86,4 +87,4 @@ export class BuildServiceImpl implements BuildService {
 
     return message;
   }
-}
\ No newline at end of file
+}

@@ -16,4 +16,4 @@ export abstract class BuildService {
    * @return buildMessage The message associated with the given phase.
    */
   public abstract getBuildMessage(phase: string): string;
-}
\ No newline at end of file
+}

@@ -86,7 +86,7 @@ export class DataFileServiceImpl implements DataFileService {
     var zip = null;
     var zipFiles = null;
     try {
-      var zip = new JSZip(buf);
+      zip = new JSZip(buf);
       zipFiles = zip.files;
     } catch (e) {
       failure();
@@ -164,9 +164,9 @@ export class DataFileServiceImpl implements DataFileService {
         'name': this.getName(path),
         'path': path,
         'canRead': true,
-        'toBlob': (function(currentFile) {
+        'toBlob': (function(file) {
           return function() {
-            return new Blob([currentFile.buffer], {type: 'application/octet-binary'});
+            return new Blob([file.buffer], {type: 'application/octet-binary'});
           };
         }(currentFile))
       });
@@ -179,4 +179,4 @@ export class DataFileServiceImpl implements DataFileService {
       failure();
     }
   }
-}
\ No newline at end of file
+}

@@ -45,4 +45,4 @@ export abstract class DataFileService {
                               progress: (percent: number) => void,
                               error: () => void,
                               loaded: (uint8array: Uint8Array) => void): void;
-}
\ No newline at end of file
+}

@@ -104,11 +104,11 @@ export class DockerfileInfoImpl implements DockerfileInfo {
       return null;
     }
 
-    if (baseImage.indexOf(this.config.getDomain() + '/') != 0) {
+    if (baseImage.indexOf(`${this.config.getDomain()}/`) != 0) {
       return null;
     }
 
-    return baseImage.substring(this.config.getDomain().length + 1);
+    return baseImage.substring(<number>this.config.getDomain().length + 1);
   }
 
   public getBaseImage(): string | null {
@@ -152,4 +152,4 @@ export class DockerfileInfoImpl implements DockerfileInfo {
 
     return baseImageAndTag;
   }
-}
\ No newline at end of file
+}

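Editor's note: the <number> cast introduced above lines up with the tslint "restrict-plus-operands" rule added later in this commit. A small sketch (the Config shape is an assumption, not from this diff) of why the cast may be needed:

// If Config is untyped (any), .length is also any, and restrict-plus-operands
// rejects `any + 1`. Casting narrows the operand to a number:
const config: any = {getDomain: () => 'quay.io'};        // assumed shape of Config
const offset: number = <number>config.getDomain().length + 1;
console.log(offset); // 8 for 'quay.io'
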
@@ -35,4 +35,4 @@ export abstract class DockerfileInfo {
    * @return baseImageAndTag The base image and tag.
    */
   public abstract getBaseImageAndTag(): string | null;
-}
\ No newline at end of file
+}

@@ -1,15 +1,11 @@
 import { Injectable } from 'ng-metadata/core';
-import { PageService } from './page.service';
+import { PageService, QuayPage, QuayPageProfile } from './page.service';
 
 
 @Injectable(PageService.name)
 export class PageServiceImpl implements ng.IServiceProvider {
 
-  private pages: any = {};
+  private pages: {[pageName: string]: QuayPage} = {};
 
-  constructor() {
-
-  }
-
   public create(pageName: string,
                 templateName: string,
@@ -26,8 +22,8 @@ export class PageServiceImpl implements ng.IServiceProvider {
     }
   }
 
-  public get(pageName: string, profiles: any[]): any[] | null {
-    for (var i = 0; i < profiles.length; ++i) {
+  public get(pageName: string, profiles: QuayPageProfile[]): [QuayPageProfile, QuayPage] | null {
+    for (let i = 0; i < profiles.length; ++i) {
       var current = profiles[i];
       var key = current.id + ':' + pageName;
       var page = this.pages[key];

@@ -22,7 +22,7 @@ export abstract class PageService implements ng.IServiceProvider {
    * @param pageName The name of the page.
    * @param profiles Available profiles to search.
    */
-  public abstract get(pageName: string, profiles: any[]): any[] | null;
+  public abstract get(pageName: string, profiles: QuayPageProfile[]): [QuayPageProfile, QuayPage] | null;
 
   /**
    * Provide the service instance.
@@ -30,3 +30,24 @@ export abstract class PageService implements ng.IServiceProvider {
    */
   public abstract $get(): PageService;
 }
+
+
+/**
+ * A type representing a registered application page.
+ */
+export type QuayPage = {
+  name: string;
+  controller: ng.IController;
+  templateName: string,
+  flags: {[key: string]: any};
+};
+
+
+/**
+ * Represents a page profile type.
+ */
+export type QuayPageProfile = {
+  id: string;
+  templatePath: string;
+};
+

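Editor's note: to make the new contract concrete, here is a sketch of literals satisfying the type aliases above and the tuple now returned by PageService.get(). The shapes come from the diff; the values are invented.

// Sketch: literals matching the exported QuayPage / QuayPageProfile aliases.
const profile: QuayPageProfile = {id: 'layout', templatePath: '/static/js/pages/'};

const page: QuayPage = {
  name: 'repo-list',             // illustrative values, not from this commit
  controller: null,              // an ng.IController would normally go here
  templateName: 'repo-list.html',
  flags: {},
};

// PageService.get() now returns the matched profile together with the page:
const found: [QuayPageProfile, QuayPage] | null = [profile, page];
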
@@ -67,9 +67,9 @@ function(KeyService, UserService, CookieService, ApiService, Features, Config, $
 
     planService.getPlan(planId, function(plan) {
       if (planService.isOrgCompatible(plan)) {
-        $location.path('/organizations/new/?plan=' + planId);
+        $location.path('/organizations/new').search('plan', planId);
       } else {
-        $location.path('/user?plan=' + planId);
+        $location.path('/user').search('plan', planId);
       }
     });
   });

@@ -1,13 +1,13 @@
 import { RouteBuilder } from './route-builder.service';
 import { Injectable, Inject } from 'ng-metadata/core';
-import { PageService } from '../page/page.service';
+import { PageService, QuayPage, QuayPageProfile } from '../page/page.service';
 
 
 @Injectable(RouteBuilder.name)
 export class RouteBuilderImpl implements RouteBuilder {
 
   public currentProfile: string = 'layout';
-  public profiles: any[] = [
+  public profiles: QuayPageProfile[] = [
     // Start with the old pages (if we asked for it).
     {id: 'old-layout', templatePath: '/static/partials/'},
     // Fallback back combined new/existing pages.
@@ -50,4 +50,4 @@ export class RouteBuilderImpl implements RouteBuilder {
 
     return this;
   }
-}
\ No newline at end of file
+}

@@ -15,4 +15,4 @@ export abstract class RouteBuilder {
    * @param pagename The name of the page to associate with this route.
    */
   public abstract route(path: string, pagename: string): RouteBuilder;
-}
\ No newline at end of file
+}

@@ -1,40 +0,0 @@
-import { UtilServiceImpl } from './util.service.impl';
-
-
-describe("UtilServiceImpl", () => {
-  var utilServiceImpl: UtilServiceImpl;
-  var $sanitizeMock: ng.sanitize.ISanitizeService;
-
-  beforeEach(() => {
-    $sanitizeMock = jasmine.createSpy('$sanitizeSpy').and.returnValue("");
-    utilServiceImpl = new UtilServiceImpl($sanitizeMock);
-  });
-
-  describe("isAdBlockEnabled", () => {
-    // TODO
-  });
-
-  describe("isEmailAddress", () => {
-    // TODO
-  });
-
-  describe("getMarkedDown", () => {
-    // TODO
-  });
-
-  describe("getFirstMarkdownLineAsText", () => {
-    // TODO
-  });
-
-  describe("escapeHtmlString", () => {
-    // TODO
-  });
-
-  describe("getRestUrl", () => {
-    // TODO
-  });
-
-  describe("textToSafeHtml", () => {
-    // TODO
-  });
-});

@@ -1,39 +0,0 @@
-import { Injectable, Inject } from 'ng-metadata/core';
-import { UtilService } from './util.service';
-
-
-@Injectable(UtilService.name)
-export class UtilServiceImpl implements UtilService {
-
-  constructor(@Inject('$sanitize') private $sanitize: ng.sanitize.ISanitizeService) {
-
-  }
-
-  public isAdBlockEnabled(callback: (isEnabled: boolean) => void): void {
-
-  }
-
-  public isEmailAddress(str: string): boolean {
-    return null;
-  }
-
-  public getMarkedDown(str: string): string {
-    return null;
-  }
-
-  public getFirstMarkdownLineAsText(commentString: string, placeholderNeeded: boolean): string {
-    return null;
-  }
-
-  public escapeHtmlString(text: string): string {
-    return null;
-  }
-
-  public getRestUrl(args: any[]): string {
-    return null;
-  }
-
-  public textToSafeHtml(text: string): string {
-    return null;
-  }
-}

@@ -1,19 +0,0 @@
-/**
- * Service which exposes various utility methods.
- */
-export abstract class UtilService {
-
-  public abstract isAdBlockEnabled(callback: (isEnabled: boolean) => void): void;
-
-  public abstract isEmailAddress(str: string): boolean;
-
-  public abstract getMarkedDown(str: string): string;
-
-  public abstract getFirstMarkdownLineAsText(commentString: string, placeholderNeeded: boolean): string;
-
-  public abstract escapeHtmlString(text: string): string;
-
-  public abstract getRestUrl(args: any[]): string;
-
-  public abstract textToSafeHtml(text: string): string;
-}

@@ -93,4 +93,4 @@ export class ViewArrayImpl implements ViewArray {
       this.timerRef = null;
     }
   }
-}
\ No newline at end of file
+}

@@ -68,4 +68,4 @@ export abstract class ViewArray {
    * @return viewArray New ViewArray instance.
    */
   public abstract create(): ViewArrayImpl;
-}
\ No newline at end of file
+}

128  static/test/e2e/image-repo.scenario.ts  Normal file
@@ -0,0 +1,128 @@
+import { browser, element, by, $, $$ } from 'protractor';
+import { appHost } from '../protractor.conf';
+import { CorTabsViewObject } from '../../js/directives/ui/cor-tabs/cor-tabs.view-object';
+
+
+describe("Image Repository", () => {
+  const username = 'devtable';
+  const password = 'password';
+  const repoTabs: CorTabsViewObject = new CorTabsViewObject();
+
+  beforeAll((done) => {
+    browser.waitForAngularEnabled(false);
+
+    // Sign in
+    browser.get(appHost);
+    $$('a[href="/signin/"]').get(1).click();
+    $('#signin-username').sendKeys(username);
+    $('#signin-password').sendKeys(password);
+    element(by.partialButtonText('Sign in')).click();
+    browser.sleep(4000);
+
+    // Navigate to image repository
+    browser.get(`${appHost}/repository/devtable/simple`).then(() => done());
+  });
+
+  afterAll(() => {
+    browser.waitForAngularEnabled(true);
+  });
+
+  describe("information tab", () => {
+    const tabTitle: string = 'Information';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository description", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('h4', 'Description')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("tags tab", () => {
+    const tabTitle: string = 'Tags';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository tags", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('.tab-header', 'Repository Tags')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("tag history tab", () => {
+    const tabTitle: string = 'Tag History';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository tags", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('.tab-header', 'Tag History')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("builds tab", () => {
+    const tabTitle: string = 'Builds';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository tags", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('.tab-header', 'Repository Builds')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("usage logs tab", () => {
+    const tabTitle: string = 'Usage Logs';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository tags", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('h3', 'Usage Logs')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("settings tab", () => {
+    const tabTitle: string = 'Settings';
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle(tabTitle).then(() => done());
+    });
+
+    it("displays repository tags", () => {
+      expect(repoTabs.isActiveTab(tabTitle)).toBe(true);
+      expect(element(by.cssContainingText('.tab-header', 'Settings')).isDisplayed()).toBe(true);
+    });
+  });
+
+  describe("tabs navigation", () => {
+
+    beforeAll((done) => {
+      repoTabs.selectTabByTitle('Information');
+      repoTabs.selectTabByTitle('Tags');
+      done();
+    });
+
+    it("back button returns to previous tab", () => {
+      browser.navigate().back();
+
+      expect(repoTabs.isActiveTab('Information')).toBe(true);
+    });
+
+    it("forward button returns to next tab", () => {
+      browser.navigate().forward();
+
+      expect(repoTabs.isActiveTab('Tags')).toBe(true);
+    });
+  });
+});

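Editor's note: the scenario above leans on a page-object class, CorTabsViewObject, whose implementation is not shown in this section. A minimal sketch consistent with the two calls used here (selectTabByTitle, isActiveTab) might look like the following; the selectors are invented assumptions, not the project's actual markup.

// Sketch of the view-object pattern used by image-repo.scenario.ts.
// Only the two methods exercised above are shown; CSS selectors are assumptions.
import { element, by, promise } from 'protractor';

export class CorTabsViewObjectSketch {
  public selectTabByTitle(title: string): promise.Promise<void> {
    return element(by.cssContainingText('.cor-tab', title)).click();
  }

  public isActiveTab(title: string): promise.Promise<boolean> {
    return element(by.cssContainingText('.cor-tab.active', title)).isPresent();
  }
}
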
19  static/test/e2e/sanity.scenario.ts  Normal file
@@ -0,0 +1,19 @@
+import { browser } from 'protractor';
+import { appHost } from '../protractor.conf';
+
+
+describe("sanity test", () => {
+
+  beforeEach(() => {
+    browser.get(appHost);
+  });
+
+  it("loads home view with no AngularJS errors", () => {
+    browser.manage().logs().get('browser')
+      .then((browserLog: any) => {
+        browserLog.forEach((log: any) => {
+          expect(log.message).not.toContain("angular");
+        });
+      });
+  });
+});

155  static/test/e2e/trigger-creation.scenario.ts  Normal file
@@ -0,0 +1,155 @@
+import { browser, element, by, $, $$ } from 'protractor';
+import { ManageTriggerViewObject } from '../../js/directives/ui/manage-trigger/manage-trigger.view-object';
+import { appHost } from '../protractor.conf';
+
+
+describe("Trigger Creation", () => {
+  const username = 'devtable';
+  const password = 'password';
+  var manageTriggerView: ManageTriggerViewObject = new ManageTriggerViewObject();
+
+  beforeAll((done) => {
+    browser.waitForAngularEnabled(false);
+
+    // Sign in
+    browser.get(appHost);
+    $$('a[href="/signin/"]').get(1).click();
+    $('#signin-username').sendKeys(username);
+    $('#signin-password').sendKeys(password);
+    element(by.partialButtonText('Sign in')).click();
+    browser.sleep(4000).then(() => done());
+  });
+
+  afterAll(() => {
+    browser.waitForAngularEnabled(true);
+    // TODO(alecmerdler): Delete all created triggers
+  });
+
+  describe("for custom git", () => {
+
+    beforeAll(() => {
+      // Navigate to trigger setup
+      browser.get(`${appHost}/repository/devtable/simple?tab=builds`)
+    });
+
+    it("can select custom git repository push as a trigger option", (done) => {
+      element(by.buttonText('Create Build Trigger')).click();
+      element(by.linkText('Custom Git Repository Push')).click();
+      browser.sleep(1000);
+      done();
+    });
+
+    it("shows custom git repository section first", () => {
+      expect(manageTriggerView.sections['customrepo'].isDisplayed()).toBe(true);
+    });
+
+    it("does not accept invalid custom git repository URL's", () => {
+      manageTriggerView.continue()
+        .then(() => fail('Should not accept empty input for repository URL'))
+        .catch(() => manageTriggerView.enterRepositoryURL('git@some'))
+        .then(() => manageTriggerView.continue())
+        .then(() => fail('Should not accept invalid input for repository URL'))
+        .catch(() => null);
+    });
+
+    it("proceeds to Dockerfile location section when given valid URL", () => {
+      manageTriggerView.enterRepositoryURL('git@somegit.com:someuser/somerepo.git');
+      manageTriggerView.continue()
+        .then(() => {
+          expect(manageTriggerView.sections['dockerfilelocation'].isDisplayed()).toBe(true);
+        })
+        .catch(reason => fail(reason));
+    });
+
+    it("does not accept Dockerfile location that does not end with a filename", () => {
+      manageTriggerView.enterDockerfileLocation('/')
+        .then(() => manageTriggerView.continue())
+        .then(() => fail('Should not accept Dockerfile location that does not end with a filename'))
+        .catch(() => null);
+    });
+
+    it("does not provide Dockerfile location suggestions", () => {
+      manageTriggerView.getDockerfileSuggestions()
+        .then((results) => {
+          expect(results.length).toEqual(0);
+        });
+    });
+
+    it("proceeds to Docker context location section when given a valid Dockerfile location", () => {
+      manageTriggerView.enterDockerfileLocation('/Dockerfile')
+        .then(() => manageTriggerView.continue())
+        .then(() => {
+          expect(manageTriggerView.sections['contextlocation'].isDisplayed()).toBe(true);
+        })
+        .catch(reason => fail(reason));
+    });
+
+    it("does not accept invalid Docker context", () => {
+      manageTriggerView.enterDockerContext('')
+        .then(() => manageTriggerView.continue())
+        .then(() => fail('Should not acccept invalid Docker context location'))
+        .catch(() => null);
+    });
+
+    it("provides suggestions for Docker context based on Dockerfile location", () => {
+      manageTriggerView.getDockerContextSuggestions()
+        .then((results) => {
+          expect(results).toContain('/');
+        });
+    });
+
+    it("proceeds to robot selection section when given valid Docker context", () => {
+      manageTriggerView.enterDockerContext('/')
+        .then(() => manageTriggerView.continue())
+        .then(() => {
+          expect(manageTriggerView.sections['robot'].isDisplayed()).toBe(true);
+        })
+        .catch(reason => fail(reason));
+    });
+
+    it("allows selection of optional robot account", () => {
+      manageTriggerView.selectRobotAccount(0)
+        .catch(reason => fail(reason));
+    });
+
+    it("proceeds to verification section", () => {
+      manageTriggerView.continue()
+        .then(() => {
+          expect(manageTriggerView.sections['verification'].isDisplayed()).toBe(true);
+        })
+        .catch(reason => fail(reason));
+    });
+
+    it("displays success message after creating the trigger", () => {
+      manageTriggerView.continue()
+        .then(() => {
+          browser.sleep(2000);
+          expect($('h3').getText()).toEqual('Trigger has been successfully activated');
+        })
+        .catch(reason => fail(reason));
+    });
+  });
+
+  describe("for githost", () => {
+
+    beforeAll(() => {
+      // Navigate to trigger setup
+      browser.get(`${appHost}/repository/devtable/simple?tab=builds`);
+    });
+
+    it("can select GitHub repository push as a trigger option", () => {
+      element(by.partialButtonText('Create Build Trigger')).click();
+      element(by.linkText('GitHub Repository Push')).click();
+    });
+
+    it("redirects to GitHub login page for granting authentication", () => {
+      expect(browser.getCurrentUrl()).toContain('github.com');
+
+      // TODO: Which credentials do we use to login to GitHub?
+    });
+
+    xit("shows namespace select section first", () => {
+      expect(manageTriggerView.sections['namespace'].isDisplayed()).toBe(true);
+    });
+  });
+});

67  static/test/protractor.conf.ts  Normal file
@@ -0,0 +1,67 @@
+import { Config, browser } from 'protractor';
+import * as request from 'request';
+
+
+/*
+ * Use a set environment variable or default value for the app host.
+ */
+export const appHost: string = process.env.APP_HOST || 'http://localhost:5000';
+
+
+/**
+ * Protractor is configured to run against a Selenium instance running locally on port 4444 and a Quay instance running
+ * locally on port 5000.
+ * Easiest method is running the Quay and Selenium containers:
+ *   $ docker run -d --net=host -v /dev/shm:/dev/shm selenium/standalone-chrome:3.4.0
+ *   $ docker run -d --net=host quay.io/quay/quay
+ *   $ yarn run e2e
+ */
+export const config: Config = {
+  framework: 'jasmine',
+  seleniumAddress: 'http://localhost:4444/wd/hub',
+  // Uncomment to run tests against local Chrome instance
+  directConnect: true,
+  capabilities: {
+    browserName: 'chrome',
+    chromeOptions: {
+      args: [
+        '--disable-infobars'
+      ],
+      prefs: {
+        'profile.password_manager_enabled': false,
+        'credentials_enable_service': false,
+        'password_manager_enabled': false
+      }
+    }
+  },
+  onPrepare: () => {
+    browser.driver.manage().window().maximize();
+
+    // Resolve promise when request returns HTTP 200
+    return new Promise((resolve, reject) => {
+      const pollServer = (success, failure) => {
+        request(appHost, (error, response, body) => {
+          if (!error && response.statusCode == 200) {
+            console.log(`Successfully connected to server at ${appHost}`);
+            success();
+          } else {
+            console.log(`Could not connect to server at ${appHost}`);
+            setTimeout(() => {
+              failure(success, failure);
+            }, 5000);
+          }
+        });
+      };
+
+      pollServer(resolve, pollServer);
+    });
+  },
+  onComplete: () => {
+    browser.close();
+  },
+  specs: [
+    // './e2e/sanity.scenario.ts',
+    // './e2e/trigger-creation.scenario.ts',
+    './e2e/image-repo.scenario.ts',
+  ],
+};

@@ -15,6 +15,7 @@ from data.model.user import LoginWrappedDBUser
 from endpoints.api import api_bp
 from endpoints.appr import appr_bp
 from endpoints.web import web
+from endpoints.verbs import verbs as verbs_bp
 
 from initdb import initialize_database, populate_database
 
@@ -166,6 +167,7 @@ def app(appconfig, initialized_db):
   app.register_blueprint(api_bp, url_prefix='/api')
   app.register_blueprint(appr_bp, url_prefix='/cnr')
   app.register_blueprint(web, url_prefix='/')
+  app.register_blueprint(verbs_bp, url_prefix='/c1')
 
   app.config.update(appconfig)
   return app

@@ -509,100 +509,3 @@ def build_v2_index_specs():
       request_status(401, 401, 401, 401, 404),
   ]
 
-
-class VerbTestSpec(object):
-  def __init__(self, index_name, method_name, repo_name, rpath=False, **kwargs):
-    self.index_name = index_name
-    self.repo_name = repo_name
-    self.method_name = method_name
-    self.single_repository_path = rpath
-
-    self.kwargs = kwargs
-
-    self.anon_code = 401
-    self.no_access_code = 403
-    self.read_code = 200
-    self.admin_code = 200
-    self.creator_code = 200
-
-  def request_status(self, anon_code=401, no_access_code=403, read_code=200, creator_code=200,
-                     admin_code=200):
-    self.anon_code = anon_code
-    self.no_access_code = no_access_code
-    self.read_code = read_code
-    self.creator_code = creator_code
-    self.admin_code = admin_code
-    return self
-
-  def get_url(self):
-    if self.single_repository_path:
-      return url_for(self.index_name, repository=self.repo_name, **self.kwargs)
-    else:
-      (namespace, repo_name) = self.repo_name.split('/')
-      return url_for(self.index_name, namespace=namespace, repository=repo_name, **self.kwargs)
-
-  def gen_basic_auth(self, username, password):
-    encoded = b64encode('%s:%s' % (username, password))
-    return 'basic %s' % encoded
-
-
-ACI_ARGS = {
-  'server': 'someserver',
-  'tag': 'fake',
-  'os': 'linux',
-  'arch': 'x64',
-}
-
-def build_verbs_specs():
-  return [
-    # get_aci_signature
-    VerbTestSpec('verbs.get_aci_signature', 'GET', PUBLIC_REPO, **ACI_ARGS).
-      request_status(404, 404, 404, 404, 404),
-
-    VerbTestSpec('verbs.get_aci_signature', 'GET', PRIVATE_REPO, **ACI_ARGS).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_aci_signature', 'GET', ORG_REPO, **ACI_ARGS).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_aci_signature', 'GET', ANOTHER_ORG_REPO, **ACI_ARGS).
-      request_status(403, 403, 403, 403, 404),
-
-    # get_aci_image
-    VerbTestSpec('verbs.get_aci_image', 'GET', PUBLIC_REPO, **ACI_ARGS).
-      request_status(404, 404, 404, 404, 404),
-
-    VerbTestSpec('verbs.get_aci_image', 'GET', PRIVATE_REPO, **ACI_ARGS).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_aci_image', 'GET', ORG_REPO, **ACI_ARGS).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_aci_image', 'GET', ANOTHER_ORG_REPO, **ACI_ARGS).
-      request_status(403, 403, 403, 403, 404),
-
-    # get_squashed_tag
-    VerbTestSpec('verbs.get_squashed_tag', 'GET', PUBLIC_REPO, tag='fake').
-      request_status(404, 404, 404, 404, 404),
-
-    VerbTestSpec('verbs.get_squashed_tag', 'GET', PRIVATE_REPO, tag='fake').
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_squashed_tag', 'GET', ORG_REPO, tag='fake').
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_squashed_tag', 'GET', ANOTHER_ORG_REPO, tag='fake').
-      request_status(403, 403, 403, 403, 404),
-
-    # get_tag_torrent
-    VerbTestSpec('verbs.get_tag_torrent', 'GET', PUBLIC_REPO, digest='sha256:1234', rpath=True).
-      request_status(404, 404, 404, 404, 404),
-
-    VerbTestSpec('verbs.get_tag_torrent', 'GET', PRIVATE_REPO, digest='sha256:1234', rpath=True).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_tag_torrent', 'GET', ORG_REPO, digest='sha256:1234', rpath=True).
-      request_status(403, 403, 404, 403, 404),
-
-    VerbTestSpec('verbs.get_tag_torrent', 'GET', ANOTHER_ORG_REPO, digest='sha256:1234', rpath=True).
-      request_status(403, 403, 403, 403, 404),
-  ]

@@ -4834,6 +4834,20 @@ class TestRepositoryManifestLabels(ApiTestCase):
 
     self.assertEquals(0, len(json['labels']))
 
+    self.postJsonResponse(RepositoryManifestLabels,
+                          params=dict(repository=repository,
+                                      manifestref=tag_manifest.digest),
+                          data=dict(key='bad_label', value='world',
+                                    media_type='text/plain'),
+                          expected_code=400)
+
+    self.postJsonResponse(RepositoryManifestLabels,
+                          params=dict(repository=repository,
+                                      manifestref=tag_manifest.digest),
+                          data=dict(key='hello', value='world',
+                                    media_type='bad_media_type'),
+                          expected_code=400)
+
     # Add some labels to the manifest.
     with assert_action_logged('manifest_label_add'):
       label1 = self.postJsonResponse(RepositoryManifestLabels,

@@ -160,7 +160,7 @@ class TestSecurityScanner(unittest.TestCase):
       security_scanner.set_internal_error_layer_id(security_scanner.layer_id(layer))
 
       analyzer = LayerAnalyzer(app.config, self.api)
-      with self.assertRaises(APIRequestFailure) as ctx:
+      with self.assertRaises(APIRequestFailure):
         analyzer.analyze_recursively(layer)
 
       layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest')
@@ -185,6 +185,27 @@ class TestSecurityScanner(unittest.TestCase):
       layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest')
       self.assertAnalyzed(layer, security_scanner, False, 1)
 
+  def test_analyze_layer_unexpected_status(self):
+    """ Tests that a response from a scanner with an unexpected status code fails correctly. """
+
+    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest', include_storage=True)
+    self.assertFalse(layer.security_indexed)
+    self.assertEquals(-1, layer.security_indexed_engine)
+
+    with fake_security_scanner() as security_scanner:
+      # Make it so trying to analyze the parent will fail with an error.
+      security_scanner.set_unexpected_status_layer_id(security_scanner.layer_id(layer.parent))
+
+      # Try to analyze the layer and its parents, but with one request causing an error.
+      analyzer = LayerAnalyzer(app.config, self.api)
+      with self.assertRaises(APIRequestFailure):
+        analyzer.analyze_recursively(layer)
+
+      # Make sure it isn't analyzed.
+      layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest')
+      self.assertAnalyzed(layer, security_scanner, False, -1)
+
+
   def test_analyze_layer_missing_parent_handled(self):
     """ Tests that a missing parent causes an automatic reanalysis, which succeeds. """

@@ -1,100 +0,0 @@
-import unittest
-
-import endpoints.decorated  # Register the various exceptions via decorators.
-
-from app import app
-from endpoints.verbs import verbs
-from initdb import setup_database_for_testing, finished_database_for_testing
-from test.specs import build_verbs_specs
-
-app.register_blueprint(verbs, url_prefix='/c1')
-
-NO_ACCESS_USER = 'freshuser'
-READ_ACCESS_USER = 'reader'
-ADMIN_ACCESS_USER = 'devtable'
-CREATOR_ACCESS_USER = 'creator'
-
-
-class EndpointTestCase(unittest.TestCase):
-  def setUp(self):
-    setup_database_for_testing(self)
-
-  def tearDown(self):
-    finished_database_for_testing(self)
-
-
-class _SpecTestBuilder(type):
-  @staticmethod
-  def _test_generator(url, test_spec, attrs):
-    def test(self):
-      with app.test_client() as c:
-        headers = {}
-
-        if attrs['auth_username']:
-          headers['Authorization'] = test_spec.gen_basic_auth(attrs['auth_username'], 'password')
-
-        expected_status = getattr(test_spec, attrs['result_attr'])
-
-        rv = c.open(url, headers=headers, method=test_spec.method_name)
-        msg = '%s %s: got %s, expected: %s (auth: %s | headers %s)' % (test_spec.method_name,
-            test_spec.index_name, rv.status_code, expected_status, attrs['auth_username'],
-            headers)
-
-        self.assertEqual(rv.status_code, expected_status, msg)
-
-    return test
-
-
-  def __new__(cls, name, bases, attrs):
-    with app.test_request_context() as ctx:
-      specs = attrs['spec_func']()
-      for test_spec in specs:
-        test_name = '%s_%s_%s_%s_%s' % (test_spec.index_name, test_spec.method_name,
-                                        test_spec.repo_name, attrs['auth_username'] or 'anon',
-                                        attrs['result_attr'])
-        test_name = test_name.replace('/', '_').replace('-', '_')
-
-        test_name = 'test_' + test_name.lower().replace('verbs.', 'verbs_')
-        url = test_spec.get_url()
-        attrs[test_name] = _SpecTestBuilder._test_generator(url, test_spec, attrs)
-
-    return type(name, bases, attrs)
-
-
-class TestAnonymousAccess(EndpointTestCase):
-  __metaclass__ = _SpecTestBuilder
-  spec_func = build_verbs_specs
-  result_attr = 'anon_code'
-  auth_username = None
-
-
-class TestNoAccess(EndpointTestCase):
-  __metaclass__ = _SpecTestBuilder
-  spec_func = build_verbs_specs
-  result_attr = 'no_access_code'
-  auth_username = NO_ACCESS_USER
-
-
-class TestReadAccess(EndpointTestCase):
-  __metaclass__ = _SpecTestBuilder
-  spec_func = build_verbs_specs
-  result_attr = 'read_code'
-  auth_username = READ_ACCESS_USER
-
-
-class TestCreatorAccess(EndpointTestCase):
-  __metaclass__ = _SpecTestBuilder
-  spec_func = build_verbs_specs
-  result_attr = 'creator_code'
-  auth_username = CREATOR_ACCESS_USER
-
-
-class TestAdminAccess(EndpointTestCase):
-  __metaclass__ = _SpecTestBuilder
-  spec_func = build_verbs_specs
-  result_attr = 'admin_code'
-  auth_username = ADMIN_ACCESS_USER
-
-
-if __name__ == '__main__':
-  unittest.main()

@@ -1,22 +1,17 @@
 {
   "compilerOptions": {
     "baseUrl": ".",
-    "jsx": "react",
     "module": "commonjs",
     "outDir": "./build/",
     "target": "es5",
     "lib": ["es2017", "dom"],
     "experimentalDecorators": true,
-    "sourceMap": true,
-    "paths": {
-      "sass/*": ["./static/css/directives/components/pages/*"]
-    }
+    "sourceMap": true
   },
   "exclude": [
     "node_modules"
   ],
   "include": [
-    "./static/js/**/*.tsx",
     "./static/js/**/*.ts"
   ]
 }

30  tslint.json
@@ -1,5 +1,29 @@
 {
   "rules": {
-    "no-default-export": true
+    "no-default-export": true,
+    "member-access": true,
+    "member-ordering": [true, {"order": "fields-first"}],
+    "no-empty-interface": true,
+    "no-namespace": true,
+    "no-reference": true,
+    "curly": true,
+    "no-conditional-assignment": true,
+    "no-duplicate-super": true,
+    "no-empty": true,
+    "no-invalid-template-strings": true,
+    "no-misused-new": true,
+    "no-shadowed-variable": true,
+    "no-unbound-method": true,
+    "restrict-plus-operands": true,
+    "eofline": true,
+    "indent": [true, "spaces", 2],
+    "max-line-length": [true, 120],
+    "class-name": true,
+    "import-spacing": true,
+    "align": true,
+    "new-parens": true,
+    "semicolon": true,
+    "space-before-function-paren": [true, "never"],
+    "whitespace": [true, "check-decl", "check-operator", "check-module", "check-separator", "check-type", "check-preblock"]
   }
 }

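Editor's note: the expanded rule set is not incidental; it explains most of the mechanical edits earlier in this diff. A small illustration follows (the code is invented, the rules are from the config above).

// Illustration only -- what three of the new rules enforce.
class Example {
  // "member-access": members need explicit modifiers, hence the `public`
  // added to TypeaheadDirective's inputs and outputs above.
  public name: string = '';

  public describe(): string {
    // "semicolon": statements must end with ';', hence the '})' -> '});' fixes.
    return this.name;
  }
}

// "max-line-length" [true, 120]: long literals get split across lines,
// as in build.service.impl.ts and quay.run.ts above.
const message = 'An internal system error occurred while building; ' +
                'the build will be retried in the next few minutes.';
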
@@ -1,6 +0,0 @@
-{
-  "globalDependencies": {
-    "react": "registry:dt/react#0.14.0+20160927082313",
-    "react-dom": "registry:dt/react-dom#0.14.0+20160412154040"
-  }
-}

@@ -387,18 +387,27 @@ class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface):
       response = self._call('GET', _API_METHOD_GET_LAYER % layer_id, params=params)
       logger.debug('Got response %s for vulnerabilities for layer %s',
                    response.status_code, layer_id)
+      try:
+        return response.json()
+      except ValueError:
+        logger.exception('Failed to decode response JSON')
+        return None
+
     except Non200ResponseException as ex:
       logger.debug('Got failed response %s for vulnerabilities for layer %s',
                    ex.response.status_code, layer_id)
       if ex.response.status_code == 404:
         return None
-      elif ex.response.status_code // 100 == 5:
+      else:
         logger.error(
           'downstream security service failure: status %d, text: %s',
           ex.response.status_code,
           ex.response.text,
         )
-        raise APIRequestFailure('Downstream service returned 5xx')
+        if ex.response.status_code // 100 == 5:
+          raise APIRequestFailure('Downstream service returned 5xx')
+        else:
+          raise APIRequestFailure('Downstream service returned non-200')
     except requests.exceptions.Timeout:
       raise APIRequestFailure('API call timed out')
     except requests.exceptions.ConnectionError:
@@ -407,11 +416,6 @@ class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface):
       logger.exception('Failed to get layer data response for %s', layer_id)
       raise APIRequestFailure()
 
-    try:
-      return response.json()
-    except ValueError:
-      logger.exception('Failed to decode response JSON')
-
   def _request(self, method, endpoint, path, body, params, timeout):
     """ Issues an HTTP request to the security endpoint. """

@@ -33,6 +33,7 @@ class FakeSecurityScanner(object):
     self.fail_layer_id = None
     self.internal_error_layer_id = None
     self.error_layer_id = None
+    self.unexpected_status_layer_id = None

   def set_ok_layer_id(self, ok_layer_id):
     """ Sets a layer ID that, if encountered when the analyze call is made, causes a 200
@@ -58,6 +59,12 @@ class FakeSecurityScanner(object):
     """
     self.error_layer_id = error_layer_id

+  def set_unexpected_status_layer_id(self, layer_id):
+    """ Sets a layer ID that, if encountered when the analyze call is made, causes an HTTP 600
+        to be raised. This is useful in testing the robustness of the client to unknown status
+        codes.
+    """
+    self.unexpected_status_layer_id = layer_id

   def has_layer(self, layer_id):
     """ Returns true if the layer with the given ID has been analyzed. """
     return layer_id in self.layers
@@ -252,6 +259,13 @@ class FakeSecurityScanner(object):
         'content': json.dumps({'Error': {'Message': 'Some sort of error'}}),
       }

+      if layer['Name'] == self.unexpected_status_layer_id:
+        return {
+          'status_code': 600,
+          'content': json.dumps({'Error': {'Message': 'Some sort of error'}}),
+        }
+
     parent_id = layer.get('ParentName', None)
     parent_layer = None
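A sketch of how a test might exercise the new hook; `secscan_api.analyze_layer` and the fixture names here are illustrative stand-ins, not identifiers from this PR:

    import pytest

    from util.secscan.api import APIRequestFailure

    def test_unexpected_status(fake_security_scanner, secscan_api, layer):
      # Make the fake scanner answer the analyze call for this layer with HTTP 600.
      fake_security_scanner.set_unexpected_status_layer_id(layer.id)

      # With the api.py change above, any non-200/non-404 status now raises
      # APIRequestFailure instead of falling through unhandled.
      with pytest.raises(APIRequestFailure):
        secscan_api.analyze_layer(layer)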
@@ -21,7 +21,7 @@ var config = {
   module: {
     rules: [
       {
-        test: /\.tsx?$/,
+        test: /\.ts?$/,
         use: ["ts-loader"],
         exclude: /node_modules/
       },
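One behavioral note on the new pattern (an observation, not part of the diff): the `?` only makes the trailing `s` optional, so `/\.ts?$/` stops matching `.tsx` files and incidentally matches a bare `.t` extension. The same patterns checked in Python:

    import re

    new = re.compile(r'\.ts?$')    # mirrors the new webpack test: /\.ts?$/
    old = re.compile(r'\.tsx?$')   # mirrors the previous /\.tsx?$/

    assert old.search('panel.tsx') and old.search('panel.ts')
    assert not new.search('panel.tsx')   # .tsx is no longer routed to ts-loader
    assert new.search('panel.ts')
    assert new.search('panel.t')         # quirk: a bare .t extension also matches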
@@ -1,115 +0,0 @@
-import logging.config
-import time
-
-from math import log10
-
-import features
-
-from app import app, secscan_api, prometheus
-from workers.worker import Worker
-from data.database import UseThenDisconnect
-from data.model.image import (get_images_eligible_for_scan, get_image_pk_field,
-                              get_max_id_for_sec_scan, get_min_id_for_sec_scan)
-from util.secscan.api import SecurityConfigValidator, APIRequestFailure
-from util.secscan.analyzer import LayerAnalyzer, PreemptedException
-from util.migrate.allocator import yield_random_entries
-from util.log import logfile_path
-from endpoints.v2 import v2_bp
-
-
-DEFAULT_INDEXING_INTERVAL = 30
-
-
-logger = logging.getLogger(__name__)
-unscanned_images_gauge = prometheus.create_gauge('unscanned_images',
-                                                 'Number of images that clair needs to scan.')
-max_unscanned_images_gauge = prometheus.create_gauge('max_unscanned_image_id',
-                                                     'Max ID of the unscanned images.')
-
-class SecurityWorker(Worker):
-  def __init__(self):
-    super(SecurityWorker, self).__init__()
-    validator = SecurityConfigValidator(app.config)
-    if not validator.valid():
-      logger.warning('Failed to validate security scan configuration')
-      return
-
-    self._target_version = app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 3)
-    self._analyzer = LayerAnalyzer(app.config, secscan_api)
-    self._min_id = None
-
-    interval = app.config.get('SECURITY_SCANNER_INDEXING_INTERVAL', DEFAULT_INDEXING_INTERVAL)
-    self.add_operation(self._index_images, interval)
-
-  def _index_images(self):
-    def batch_query():
-      return get_images_eligible_for_scan(self._target_version)
-
-    # Get the ID of the last image we can analyze. Will be None if there are no images in the
-    # database.
-    max_id = get_max_id_for_sec_scan()
-    if max_id is None:
-      return
-
-    if self.min_id is None or self.min_id > max_id:
-      logger.info('Could not find any available images for scanning.')
-      return
-
-    max_unscanned_images_gauge.Set(max_id)
-
-    # 4^log10(total) gives us a scalable batch size into the billions.
-    batch_size = int(4 ** log10(max(10, max_id - self.min_id)))
-
-    with UseThenDisconnect(app.config):
-      to_scan_generator = yield_random_entries(
-        batch_query,
-        get_image_pk_field(),
-        batch_size,
-        max_id,
-        self.min_id,
-      )
-      for candidate, abt, num_remaining in to_scan_generator:
-        try:
-          self._analyzer.analyze_recursively(candidate)
-        except PreemptedException:
-          logger.info('Another worker pre-empted us for layer: %s', candidate.id)
-          abt.set()
-        except APIRequestFailure:
-          logger.exception('Security scanner service unavailable')
-          return
-
-        unscanned_images_gauge.Set(num_remaining)
-
-      # If we reach this point, we analyzed every image up to max_id; the next time the worker
-      # runs, we want to start from the next image.
-      self.min_id = max_id + 1
-
-  @property
-  def min_id(self):
-    """ If it hasn't already been determined, finds the ID of the first image to be analyzed.
-        First checks the config, then the database, and returns None if there are no images
-        available for scanning.
-    """
-    if self._min_id is None:
-      self._min_id = app.config.get('SECURITY_SCANNER_INDEXING_MIN_ID')
-      if self._min_id is None:
-        self._min_id = get_min_id_for_sec_scan(self._target_version)
-    return self._min_id
-
-  @min_id.setter
-  def min_id(self, new_min_id):
-    self._min_id = new_min_id
-
-
-if __name__ == '__main__':
-  app.register_blueprint(v2_bp, url_prefix='/v2')
-
-  if not features.SECURITY_SCANNER:
-    logger.debug('Security scanner disabled; skipping SecurityWorker')
-    while True:
-      time.sleep(100000)
-
-  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
-  worker = SecurityWorker()
-  worker.start()
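The removed worker's batch-size comment is worth a quick worked check: `4 ** log10(n)` equals `n ** log10(4)`, roughly `n ** 0.6`, so the batch grows far slower than the ID range. A standalone sketch of the same heuristic:

    from math import log10

    def batch_size(id_range):
      # Same heuristic as the deleted _index_images: floor the range at 10, then 4^log10(n).
      return int(4 ** log10(max(10, id_range)))

    print(batch_size(10))              # 4
    print(batch_size(1_000_000))       # 4096   (4 ** 6)
    print(batch_size(1_000_000_000))   # 262144 (4 ** 9)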
workers/securityworker/__init__.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+import logging.config
+
+from app import app, prometheus
+from data.database import UseThenDisconnect
+from workers.securityworker.models_pre_oci import pre_oci_model as model
+from util.secscan.api import APIRequestFailure
+from util.secscan.analyzer import PreemptedException
+
+logger = logging.getLogger(__name__)
+unscanned_images_gauge = prometheus.create_gauge('unscanned_images',
+                                                 'Number of images that clair needs to scan.')
+
+
+def index_images(target_version, analyzer, token=None):
+  """ Performs security indexing of all images in the database not scanned at the target version.
+      If a token is provided, scanning will begin where the token indicates it previously
+      completed.
+  """
+  iterator, next_token = model.candidates_to_scan(target_version, start_token=token)
+  if iterator is None:
+    logger.debug('Found no additional images to scan')
+    return None
+
+  with UseThenDisconnect(app.config):
+    for candidate, abt, num_remaining in iterator:
+      try:
+        analyzer.analyze_recursively(candidate)
+      except PreemptedException:
+        logger.info('Another worker pre-empted us for layer: %s', candidate.id)
+        abt.set()
+      except APIRequestFailure:
+        logger.exception('Security scanner service unavailable')
+        return
+
+      unscanned_images_gauge.Set(num_remaining)
+
+  return next_token
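A sketch of how a periodic worker might drive the extracted function, persisting the token between runs; the `LayerAnalyzer` wiring is carried over from the deleted module and is an assumption about the surrounding code, not something shown in this diff:

    from app import app, secscan_api
    from util.secscan.analyzer import LayerAnalyzer
    from workers.securityworker import index_images

    target_version = app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 3)
    analyzer = LayerAnalyzer(app.config, secscan_api)

    token = None
    def run_indexing_pass():
      # index_images returns the next resume token after a completed pass, or None
      # when there is nothing further to scan (or the scanner API was unavailable).
      global token
      token = index_images(target_version, analyzer, token=token)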