Merge branch 'bobthe' into tutorial
README.md (17 changes)

@@ -14,13 +14,22 @@ virtualenv --distribute venv
source venv/bin/activate
pip install -r requirements.txt
sudo gdebi --n binary_dependencies/*.deb
sudo cp conf/logrotate/* /etc/logrotate.d/
```

running:

```
sudo mkdir -p /mnt/nginx/ && sudo /usr/local/nginx/sbin/nginx -c `pwd`/nginx.conf
STACK=prod gunicorn -c gunicorn_config.py application:application
sudo mkdir -p /mnt/logs/ && sudo chown $USER /mnt/logs/ && sudo /usr/local/nginx/sbin/nginx -c `pwd`/conf/nginx.conf
sudo mkdir -p /mnt/logs/ && sudo chown $USER /mnt/logs/ && STACK=prod gunicorn -c conf/gunicorn_config.py application:application
```

start the log shipper:

```
curl -s https://get.docker.io/ubuntu/ | sudo sh
sudo docker pull quay.io/quay/logstash
sudo docker run -d -e REDIS_PORT_6379_TCP_ADDR=logs.quay.io -v /mnt/logs:/mnt/logs quay.io/quay/logstash quay.conf
```

start the workers:

@@ -34,8 +43,8 @@ STACK=prod python -m workers.webhookworker -D
bouncing the servers:

```
sudo kill -HUP <pid of nginx>
kill -HUP <pid of gunicorn>
sudo kill -HUP `cat /mnt/logs/nginx.pid`
kill -HUP `cat /mnt/logs/gunicorn.pid`

kill <pids of worker daemons>
restart daemons
```
@@ -1,10 +1,15 @@
import logging
import os

from app import app as application
from data.model import db as model_db


logging.basicConfig(**application.config['LOGGING_CONFIG'])
# Initialize logging
application.config['LOGGING_CONFIG']()

# Turn off debug logging for boto
logging.getLogger('boto').setLevel(logging.CRITICAL)

from endpoints.api import api
from endpoints.index import index

@@ -29,6 +34,16 @@ application.register_blueprint(api, url_prefix='/api')
application.register_blueprint(webhooks, url_prefix='/webhooks')
application.register_blueprint(realtime, url_prefix='/realtime')


def close_db(exc):
  db = model_db
  if not db.is_closed():
    logger.debug('Disconnecting from database.')
    db.close()

application.teardown_request(close_db)


# Remove this for prod config
application.debug = True
@@ -1,26 +0,0 @@
FROM lopter/raring-base
MAINTAINER jake@devtable.com

RUN echo deb http://archive.ubuntu.com/ubuntu precise universe > /etc/apt/sources.list.d/universe.list
RUN apt-get update -qq
RUN apt-get install -qqy iptables ca-certificates lxc python-virtualenv git python-dev xz-utils aufs-tools

# This will use the latest public release. To use your own, comment it out...
ADD https://get.docker.io/builds/Linux/x86_64/docker-latest /usr/local/bin/docker
# ...then uncomment the following line, and copy your docker binary to current dir.
#ADD ./docker /usr/local/bin/docker

# Install the files
ADD ./startserver /usr/local/bin/startserver
ADD ./buildserver.py ./buildserver.py
ADD ./requirements.txt ./requirements.txt

RUN chmod +x /usr/local/bin/docker /usr/local/bin/startserver

RUN virtualenv --distribute venv
RUN venv/bin/pip install -r requirements.txt

VOLUME /var/lib/docker

EXPOSE 5002
CMD startserver
@@ -1,13 +0,0 @@
To build:

```
sudo docker build -t quay.io/quay/buildserver .
sudo docker push quay.io/quay/buildserver
```

To run:

```
sudo docker pull quay.io/quay/buildserver
sudo docker run -d -privileged -lxc-conf="lxc.aa_profile=unconfined" quay.io/quay/buildserver
```
@@ -1,214 +0,0 @@
import docker
import logging
import shutil
import os
import re
import requests
import json

from flask import Flask, jsonify, abort, make_response
from zipfile import ZipFile
from tempfile import TemporaryFile, mkdtemp
from multiprocessing.pool import ThreadPool
from base64 import b64encode


BUFFER_SIZE = 8 * 1024
LOG_FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - ' + \
             '%(funcName)s - %(message)s'

app = Flask(__name__)
logger = logging.getLogger(__name__)


def count_steps(dockerfile_path):
  with open(dockerfile_path, 'r') as dockerfileobj:
    steps = 0
    for line in dockerfileobj.readlines():
      stripped = line.strip()
      if stripped and stripped[0] is not '#':
        steps += 1
    return steps


def prepare_zip(request_file):
  build_dir = mkdtemp(prefix='docker-build-')

  # Save the zip file to temp somewhere
  with TemporaryFile() as zip_file:
    zip_file.write(request_file.content)
    to_extract = ZipFile(zip_file)
    to_extract.extractall(build_dir)

  return build_dir


def prepare_dockerfile(request_file):
  build_dir = mkdtemp(prefix='docker-build-')
  dockerfile_path = os.path.join(build_dir, "Dockerfile")
  with open(dockerfile_path, 'w') as dockerfile:
    dockerfile.write(request_file.content)

  return build_dir


def total_completion(statuses, total_images):
  percentage_with_sizes = float(len(statuses.values()))/total_images
  sent_bytes = sum([status[u'current'] for status in statuses.values()])
  total_bytes = sum([status[u'total'] for status in statuses.values()])
  return float(sent_bytes)/total_bytes*percentage_with_sizes


def build_image(build_dir, tag_name, num_steps, result_object):
  try:
    logger.debug('Starting build.')
    docker_cl = docker.Client(timeout=1200)
    result_object['status'] = 'building'
    build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True)

    current_step = 0
    built_image = None
    for status in build_status:
      # logger.debug('Status: %s', str(status))
      step_increment = re.search(r'Step ([0-9]+) :', status)
      if step_increment:
        current_step = int(step_increment.group(1))
        logger.debug('Step now: %s/%s' % (current_step, num_steps))
        result_object['current_command'] = current_step
        continue

      complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
      if complete:
        built_image = complete.group(1)
        logger.debug('Final image ID is: %s' % built_image)
        continue

    shutil.rmtree(build_dir)

    # Get the image count
    if not built_image:
      result_object['status'] = 'error'
      result_object['message'] = 'Unable to build dockerfile.'
      return

    history = json.loads(docker_cl.history(built_image))
    num_images = len(history)
    result_object['total_images'] = num_images

    result_object['status'] = 'pushing'
    logger.debug('Pushing to tag name: %s' % tag_name)
    resp = docker_cl.push(tag_name, stream=True)

    for status_str in resp:
      status = json.loads(status_str)
      logger.debug('Status: %s', status_str)
      if u'status' in status:
        status_msg = status[u'status']

        if status_msg == 'Pushing':
          if u'progressDetail' in status and u'id' in status:
            image_id = status[u'id']
            detail = status[u'progressDetail']

            if u'current' in detail and 'total' in detail:
              images = result_object['image_completion']

              images[image_id] = detail
              result_object['push_completion'] = total_completion(images,
                                                                  num_images)

      elif u'errorDetail' in status:
        result_object['status'] = 'error'
        if u'message' in status[u'errorDetail']:
          result_object['message'] = str(status[u'errorDetail'][u'message'])
        return

    result_object['status'] = 'complete'
  except Exception as e:
    logger.exception('Exception when processing request.')
    result_object['status'] = 'error'
    result_object['message'] = str(e.message)


MIME_PROCESSORS = {
  'application/zip': prepare_zip,
  'text/plain': prepare_dockerfile,
  'application/octet-stream': prepare_dockerfile,
}

# If this format it should also be changed in the api method get_repo_builds
build = {
  'total_commands': None,
  'current_command': None,
  'push_completion': 0.0,
  'status': 'waiting',
  'message': None,
  'image_completion': {},
}
pool = ThreadPool(1)


@app.before_first_request
def start_build():
  resource_url = os.environ['RESOURCE_URL']
  tag_name = os.environ['TAG']
  acccess_token = os.environ['TOKEN']

  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
               (resource_url, tag_name, acccess_token))

  # Save the token
  host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
  if host:
    docker_endpoint = 'http://%s/v1/' % host.group(1)
    dockercfg_path = os.path.join(os.environ.get('HOME', '.'), '.dockercfg')
    token = b64encode('$token:%s' % acccess_token)
    with open(dockercfg_path, 'w') as dockercfg:
      payload = {
        docker_endpoint: {
          'auth': token,
          'email': '',
        }
      }
      dockercfg.write(json.dumps(payload))

  else:
    raise Exception('Invalid tag name: %s' % tag_name)

  docker_resource = requests.get(resource_url)
  c_type = docker_resource.headers['content-type']

  logger.info('Request to build file of type: %s with tag: %s' %
              (c_type, tag_name))

  if c_type not in MIME_PROCESSORS:
    raise Exception('Invalid dockerfile content type: %s' % c_type)

  build_dir = MIME_PROCESSORS[c_type](docker_resource)

  dockerfile_path = os.path.join(build_dir, "Dockerfile")
  num_steps = count_steps(dockerfile_path)
  logger.debug('Dockerfile had %s steps' % num_steps)

  logger.info('Sending job to builder pool.')
  build['total_commands'] = num_steps

  pool.apply_async(build_image, [build_dir, tag_name, num_steps,
                                 build])


@app.route('/build/', methods=['GET'])
def get_status():
  if build:
    return jsonify(build)
  abort(404)


@app.route('/status/', methods=['GET'])
def health_check():
  return make_response('Running')


if __name__ == '__main__':
  logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
  app.run(host='0.0.0.0', port=5002, threaded=True)
@@ -1,5 +0,0 @@
mock==1.0.1
requests==1.2.3
six==1.3.0
flask==0.10.1
-e git+git://github.com/DevTable/docker-py.git#egg=docker-py
@@ -1,48 +0,0 @@
#!/bin/bash

# First, make sure that cgroups are mounted correctly.
CGROUP=/sys/fs/cgroup

[ -d $CGROUP ] ||
  mkdir $CGROUP

mountpoint -q $CGROUP ||
  mount -n -t tmpfs -o uid=0,gid=0,mode=0755 cgroup $CGROUP || {
    echo "Could not make a tmpfs mount. Did you use -privileged?"
    exit 1
  }

# Mount the cgroup hierarchies exactly as they are in the parent system.
for SUBSYS in $(cut -d: -f2 /proc/1/cgroup)
do
  [ -d $CGROUP/$SUBSYS ] || mkdir $CGROUP/$SUBSYS
  mountpoint -q $CGROUP/$SUBSYS ||
    mount -n -t cgroup -o $SUBSYS cgroup $CGROUP/$SUBSYS
done

# Note: as I write those lines, the LXC userland tools cannot setup
# a "sub-container" properly if the "devices" cgroup is not in its
# own hierarchy. Let's detect this and issue a warning.
grep -q :devices: /proc/1/cgroup ||
  echo "WARNING: the 'devices' cgroup should be in its own hierarchy."
grep -qw devices /proc/1/cgroup ||
  echo "WARNING: it looks like the 'devices' cgroup is not mounted."

# Now, close extraneous file descriptors.
pushd /proc/self/fd
for FD in *
do
  case "$FD" in
    # Keep stdin/stdout/stderr
    [012])
      ;;
    # Nuke everything else
    *)
      eval exec "$FD>&-"
      ;;
  esac
done
popd

docker -d &
exec venv/bin/python buildserver.py
conf/cloud-init.sh (new executable file, 60 lines)

@@ -0,0 +1,60 @@
#! /bin/sh

apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9
sh -c "echo deb http://get.docker.io/ubuntu docker main > /etc/apt/sources.list.d/docker.list"
apt-get update

apt-get install -y git python-virtualenv python-dev phantomjs libjpeg8 libjpeg62-dev libfreetype6 libfreetype6-dev libevent-dev gdebi-core lxc-docker

PRIVATE_KEY=/root/.ssh/id_rsa
echo '-----BEGIN RSA PRIVATE KEY-----' > $PRIVATE_KEY
echo 'MIIEpAIBAAKCAQEA1qYjqPJAOHzE9jyE06LgOYFXtmVWMMPdS10oWUH77/M406/l' >> $PRIVATE_KEY
echo 'BG1Nf8VU2/q7VogfR/k56xumlAYcoEP9rueEMI9j2RwDy2s5SHaT7Z+9SyZnTRtq' >> $PRIVATE_KEY
echo 'bomTUHVBQtxgRXz2XHROWtFG54MhZtIHDk31kW2qyr+rMw2/kT1h6+s9D1mF5A5i' >> $PRIVATE_KEY
echo 'DWxNQSWYyS9gaM5a5aNUVscoXAtSG7JwY4XdYEGKXwMm7UYFeHlOPH/QRTZVO9XP' >> $PRIVATE_KEY
echo 'Z/vNW1t6JZ9GIAxfFP9v2YyehF3l2R+m3VGDld4JNosUPyWOnMPbHBcTYGe2nLgj' >> $PRIVATE_KEY
echo 'zH9mqhXKR0jR2hbo0QJz5ln8TXmj5v3mfPrF1QIDAQABAoIBAC52Y/2sAm63w0Kx' >> $PRIVATE_KEY
echo 'subEuNh5wOzAXrnLi9lGXveDKu+zrDdWObKNnlrr8gRz7505ddv0fK8BmzsrX4Lp' >> $PRIVATE_KEY
echo 'dL4paxm/0BMs1z1vBkVDNZ4YF7dupqmwJ4epy/N8jhXU8hnYhNNacaOC7WArqE1D' >> $PRIVATE_KEY
echo 'ZTeZdHB4VqHwfzRb432i1dFlaCAsEQ+pRg+o0wOqH5BMZy4LY5vESK5d2E85KhqT' >> $PRIVATE_KEY
echo '1rgD2T2FrkM42H4QvYzn6ntmjRAA5eO6RSeyPlkpniNTlmSuNYt8iqx8bm1HgXFn' >> $PRIVATE_KEY
echo 'Iova/9MifFt9CFG5SJPmYkPYvAEhNmiRdob68a/0BIX+Uuc1skX72Lpb/XjqrlXZ' >> $PRIVATE_KEY
echo 'UhJYALkCgYEA9fPGq9bGNWodCwplXuq5ydZv1BK5NZod+H85hUOz+gUN12UJ3Euy' >> $PRIVATE_KEY
echo 'FAZZqV5kwQ0i1cE6Vfg9SSk1V9osdw3TIVZgTOBKBYxsuCJzIO4zlyM7qi0XFsam' >> $PRIVATE_KEY
echo 'ax/v/kfHFnoBOPruJs0Ao5F4cGhZBfS4dQZAh4EqplSjJuGoLVMbNTsCgYEA32r8' >> $PRIVATE_KEY
echo 'kspbaCK71hDc2vAxVpHR3UNSui6lQCKOC4BbA8c1XP08+BKPONeNMaytXiRe5Vrq' >> $PRIVATE_KEY
echo 'bXRf9GqY6zzM08If78qjgDd2cfVYPnrb8unth7Z7QbsSi5+E6Gt8cevBEQqv1n6H' >> $PRIVATE_KEY
echo 'jzLKlETL5qpMpRHJi98AvyHcSpYyI6XORZE0AC8CgYEAwJJDPq5l+NKBtPBJ2Jxu' >> $PRIVATE_KEY
echo 'JUN5wZF7ZCWsS7HJZrdQxnSIltpscwjtgFJMh5j5yFGxsa2eMEuyKINUWdngMMMp' >> $PRIVATE_KEY
echo 'SRPpSKfgLSH6yd1nSSRYToDuqVqulk2pZXzXGsA2eDnElUmbh9PBKVCv/UsmUMyA' >> $PRIVATE_KEY
echo 'VFg11CLlMuBX8gyC8iH8zpsCgYB2NxDfxuzoxAApu5Bw1Ej26n9mGTpLw2Sy89W/' >> $PRIVATE_KEY
echo 'JjKCZETLKD+7b26TABL4psqxFoOTzjBerAYduM2jIu+qWHw3kDxFGpO0psIDhVSe' >> $PRIVATE_KEY
echo 'SsLhXWAInqiockaMCFu3l6v3jXUPBLJLxe9E1sYhDhkx+qBvPxcRCySZ3rE3BYOI' >> $PRIVATE_KEY
echo 'cdVXBwKBgQD1Wp1eLdnA3UV2KzyyVG3K/FqKszte70NfR9gvl6bD8cGeoAAt+iyW' >> $PRIVATE_KEY
echo 'Wd3tc3FKcDfywRoxrc4Atew0ySZVv78E2vDiyAswMhsbdldALw0sftaTIfdkXzlO' >> $PRIVATE_KEY
echo '77cUl9A2niF4mf0b8JeIGrTR81f3Q/ZRjzXMg/dZLVMtzPsFd9clGw==' >> $PRIVATE_KEY
echo '-----END RSA PRIVATE KEY-----' >> $PRIVATE_KEY
chmod 600 $PRIVATE_KEY

BITBUCKET=AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kNvEcqTKl/VqLat/MaB33pZy0y3rJZtnqwR2qOOvbwKZYKiEO1O6VqNEBxKvJJelCq0dTXWT5pbO2gDXC6h6QDXCaHo6pOHGPUy+YBaGQRGuSusMEASYiWunYN0vCAI8QaXnWMXNMdFP3jHAJH0eDsoiGnLPBlBp4TNm6rYI74nMzgz3B9IikW4WVK+dc8KZJZWYjAuORU3jc1c/NPskD2ASinf8v3xnfXeukU0sJ5N6m5E8VLjObPEO+mN2t/FZTMZLiFqPWc/ALSqnMnnhwrNi2rbfg/rd/IpL8Le3pSBne8+seeFVBoGqzHM9yXw==
KNOWN_HOSTS=/root/.ssh/known_hosts
echo "|1|7Yac4eoTmXJj7g7Hdlz0PdJMNnQ=|5AckfCb6pvVav45AOBMStvCVwFk= ssh-rsa $BITBUCKET" >> $KNOWN_HOSTS
echo "|1|epKB6bDLmj4UCWcN2lJ9NT+WjS4=|MThQkD3gLXsDEdRGD15uBlI6j5Q= ssh-rsa $BITBUCKET" >> $KNOWN_HOSTS
echo "|1|tET4d+sodv8Zk+m/JXHj3OWpyUU=|8lo5vpeKH6yiflQpV+aNEsSZBtw= ssh-rsa $BITBUCKET" >> $KNOWN_HOSTS

export USER=ubuntu

git clone git@bitbucket.org:yackob03/quay.git /home/$USER/quay
cd /home/$USER/quay
virtualenv --distribute venv
venv/bin/pip install -r requirements.txt
gdebi --n binary_dependencies/*.deb
cp conf/logrotate/* /etc/logrotate.d/
chown -R $USER:$USER /home/$USER/quay

mkdir -p /mnt/logs/ && chown $USER /mnt/logs/ && /usr/local/nginx/sbin/nginx -c `pwd`/conf/nginx.conf
mkdir -p /mnt/logs/ && chown $USER /mnt/logs/ && STACK=prod sudo -u $USER -E venv/bin/gunicorn -c conf/gunicorn_config.py application:application

echo '{"https://quay.io/v1/": {"auth": "cXVheStkZXBsb3k6OVkxUFg3RDNJRTRLUFNHQ0lBTEgxN0VNNVYzWlRNUDhDTk5ISk5YQVEyTkpHQVM0OEJESDhKMVBVT1o4NjlNTA==", "email": ""}}' > /root/.dockercfg
docker pull quay.io/quay/logstash
docker run -d -e REDIS_PORT_6379_TCP_ADDR=logs.quay.io -v /mnt/logs:/mnt/logs quay.io/quay/logstash quay.conf
conf/deploy (new file, 27 lines)

@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEA1qYjqPJAOHzE9jyE06LgOYFXtmVWMMPdS10oWUH77/M406/l
BG1Nf8VU2/q7VogfR/k56xumlAYcoEP9rueEMI9j2RwDy2s5SHaT7Z+9SyZnTRtq
bomTUHVBQtxgRXz2XHROWtFG54MhZtIHDk31kW2qyr+rMw2/kT1h6+s9D1mF5A5i
DWxNQSWYyS9gaM5a5aNUVscoXAtSG7JwY4XdYEGKXwMm7UYFeHlOPH/QRTZVO9XP
Z/vNW1t6JZ9GIAxfFP9v2YyehF3l2R+m3VGDld4JNosUPyWOnMPbHBcTYGe2nLgj
zH9mqhXKR0jR2hbo0QJz5ln8TXmj5v3mfPrF1QIDAQABAoIBAC52Y/2sAm63w0Kx
subEuNh5wOzAXrnLi9lGXveDKu+zrDdWObKNnlrr8gRz7505ddv0fK8BmzsrX4Lp
dL4paxm/0BMs1z1vBkVDNZ4YF7dupqmwJ4epy/N8jhXU8hnYhNNacaOC7WArqE1D
ZTeZdHB4VqHwfzRb432i1dFlaCAsEQ+pRg+o0wOqH5BMZy4LY5vESK5d2E85KhqT
1rgD2T2FrkM42H4QvYzn6ntmjRAA5eO6RSeyPlkpniNTlmSuNYt8iqx8bm1HgXFn
Iova/9MifFt9CFG5SJPmYkPYvAEhNmiRdob68a/0BIX+Uuc1skX72Lpb/XjqrlXZ
UhJYALkCgYEA9fPGq9bGNWodCwplXuq5ydZv1BK5NZod+H85hUOz+gUN12UJ3Euy
FAZZqV5kwQ0i1cE6Vfg9SSk1V9osdw3TIVZgTOBKBYxsuCJzIO4zlyM7qi0XFsam
ax/v/kfHFnoBOPruJs0Ao5F4cGhZBfS4dQZAh4EqplSjJuGoLVMbNTsCgYEA32r8
kspbaCK71hDc2vAxVpHR3UNSui6lQCKOC4BbA8c1XP08+BKPONeNMaytXiRe5Vrq
bXRf9GqY6zzM08If78qjgDd2cfVYPnrb8unth7Z7QbsSi5+E6Gt8cevBEQqv1n6H
jzLKlETL5qpMpRHJi98AvyHcSpYyI6XORZE0AC8CgYEAwJJDPq5l+NKBtPBJ2Jxu
JUN5wZF7ZCWsS7HJZrdQxnSIltpscwjtgFJMh5j5yFGxsa2eMEuyKINUWdngMMMp
SRPpSKfgLSH6yd1nSSRYToDuqVqulk2pZXzXGsA2eDnElUmbh9PBKVCv/UsmUMyA
VFg11CLlMuBX8gyC8iH8zpsCgYB2NxDfxuzoxAApu5Bw1Ej26n9mGTpLw2Sy89W/
JjKCZETLKD+7b26TABL4psqxFoOTzjBerAYduM2jIu+qWHw3kDxFGpO0psIDhVSe
SsLhXWAInqiockaMCFu3l6v3jXUPBLJLxe9E1sYhDhkx+qBvPxcRCySZ3rE3BYOI
cdVXBwKBgQD1Wp1eLdnA3UV2KzyyVG3K/FqKszte70NfR9gvl6bD8cGeoAAt+iyW
Wd3tc3FKcDfywRoxrc4Atew0ySZVv78E2vDiyAswMhsbdldALw0sftaTIfdkXzlO
77cUl9A2niF4mf0b8JeIGrTR81f3Q/ZRjzXMg/dZLVMtzPsFd9clGw==
-----END RSA PRIVATE KEY-----
conf/deploy.pub (new file, 1 line)

@@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWpiOo8kA4fMT2PITTouA5gVe2ZVYww91LXShZQfvv8zjTr+UEbU1/xVTb+rtWiB9H+TnrG6aUBhygQ/2u54Qwj2PZHAPLazlIdpPtn71LJmdNG2puiZNQdUFC3GBFfPZcdE5a0UbngyFm0gcOTfWRbarKv6szDb+RPWHr6z0PWYXkDmINbE1BJZjJL2Bozlrlo1RWxyhcC1IbsnBjhd1gQYpfAybtRgV4eU48f9BFNlU71c9n+81bW3oln0YgDF8U/2/ZjJ6EXeXZH6bdUYOV3gk2ixQ/JY6cw9scFxNgZ7acuCPMf2aqFcpHSNHaFujRAnPmWfxNeaPm/eZ8+sXV jake@coreserver
conf/gunicorn_config.py (new file, 10 lines)

@@ -0,0 +1,10 @@
bind = 'unix:/tmp/gunicorn.sock'
workers = 8
worker_class = 'gevent'
timeout = 2000
daemon = True
pidfile = '/mnt/logs/gunicorn.pid'
errorlog = '/mnt/logs/application.log'
loglevel = 'debug'
logger_class = 'util.glogger.LogstashLogger'
pythonpath = '.'
conf/gunicorn_local.py (new file, 9 lines)

@@ -0,0 +1,9 @@
bind = '0.0.0.0:5000'
workers = 2
worker_class = 'gevent'
timeout = 2000
daemon = False
errorlog = '-'
loglevel = 'debug'
logger_class = 'util.glogger.LogstashLogger'
pythonpath = '.'
conf/hosted-http-base.conf (new file, 5 lines)

@@ -0,0 +1,5 @@
server {
  listen 80 default_server;
  server_name _;
  rewrite ^ https://$host$request_uri? permanent;
}
conf/http-base.conf (new file, 33 lines)

@@ -0,0 +1,33 @@
log_format logstash_json '{ "@timestamp": "$time_iso8601", '
                         '"@fields": { '
                         '"remote_addr": "$remote_addr", '
                         '"remote_user": "$remote_user", '
                         '"body_bytes_sent": "$body_bytes_sent", '
                         '"request_time": "$request_time", '
                         '"status": "$status", '
                         '"request": "$request", '
                         '"request_method": "$request_method", '
                         '"http_referrer": "$http_referer", '
                         '"http_user_agent": "$http_user_agent" } }';

types_hash_max_size 2048;
include /usr/local/nginx/conf/mime.types.default;

default_type application/octet-stream;
access_log /mnt/logs/nginx.access.log logstash_json;
sendfile on;

gzip on;
gzip_http_version 1.0;
gzip_proxied any;
gzip_min_length 500;
gzip_disable "MSIE [1-6]\.";
gzip_types text/plain text/xml text/css
           text/javascript application/x-javascript
           application/octet-stream;

upstream app_server {
  server unix:/tmp/gunicorn.sock fail_timeout=0;
  # For a TCP configuration:
  # server 192.168.0.7:8000 fail_timeout=0;
}
conf/logrotate/quay-logrotate (new file, 41 lines)

@@ -0,0 +1,41 @@
/mnt/logs/nginx.access.log {
  daily
  rotate 7
  compress
  delaycompress
  missingok
  notifempty
  create 644 root root

  postrotate
    kill -USR1 `cat /mnt/logs/nginx.pid`
  endscript
}

/mnt/logs/nginx.error.log {
  daily
  rotate 7
  compress
  delaycompress
  missingok
  notifempty
  create 644 root root

  postrotate
    kill -USR1 `cat /mnt/logs/nginx.pid`
  endscript
}

/mnt/logs/application.log {
  daily
  rotate 7
  compress
  delaycompress
  missingok
  notifempty
  create 644 ubuntu ubuntu

  postrotate
    kill -USR1 `cat /mnt/logs/gunicorn.pid`
  endscript
}
conf/nginx-local.conf (new file, 18 lines)

@@ -0,0 +1,18 @@
include root-base.conf;

worker_processes 2;

http {
  include http-base.conf;

  server {
    include server-base.conf;

    listen 5000 default;

    location /static/ {
      # checks for static file, if not found proxy to app
      alias /home/jake/Projects/docker/quay/static/;
    }
  }
}
conf/nginx-staging.conf (new file, 30 lines)

@@ -0,0 +1,30 @@
include root-base.conf;

worker_processes 2;

user root nogroup;

http {
  include http-base.conf;

  include hosted-http-base.conf;

  server {
    include server-base.conf;

    listen 443 default;

    ssl on;
    ssl_certificate ./certs/quay-staging-unified.cert;
    ssl_certificate_key ./certs/quay-staging.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    location /static/ {
      # checks for static file, if not found proxy to app
      alias /root/quay/static/;
    }
  }
}
conf/nginx.conf (new file, 30 lines)

@@ -0,0 +1,30 @@
include root-base.conf;

worker_processes 8;

user nobody nogroup;

http {
  include http-base.conf;

  include hosted-http-base.conf;

  server {
    include server-base.conf;

    listen 443 default;

    ssl on;
    ssl_certificate ./certs/quay-unified.cert;
    ssl_certificate_key ./certs/quay.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    location /static/ {
      # checks for static file, if not found proxy to app
      alias /home/ubuntu/quay/static/;
    }
  }
}
conf/root-base.conf (new file, 7 lines)

@@ -0,0 +1,7 @@
pid /mnt/logs/nginx.pid;
error_log /mnt/logs/nginx.error.log;

events {
  worker_connections 1024;
  accept_mutex off;
}
conf/server-base.conf (new file, 24 lines)

@@ -0,0 +1,24 @@
client_max_body_size 8G;
client_body_temp_path /mnt/logs/client_body 1 2;
server_name _;

keepalive_timeout 5;

if ($args ~ "_escaped_fragment_") {
  rewrite ^ /snapshot$uri;
}

location / {
  proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
  proxy_set_header X-Forwarded-Proto $scheme;
  proxy_set_header Host $http_host;
  proxy_redirect off;
  proxy_buffering off;

  proxy_request_buffering off;
  proxy_set_header Transfer-Encoding $http_transfer_encoding;

  proxy_pass http://app_server;
  proxy_read_timeout 2000;
  proxy_temp_path /mnt/logs/proxy_temp 1 2;
}
config.py (57 changes)

@@ -1,20 +1,18 @@
import logging
import sys
import os
import logstash_formatter

from peewee import MySQLDatabase, SqliteDatabase
from storage.s3 import S3Storage
from storage.local import LocalStorage
from data.userfiles import UserRequestFiles
from data.buildlogs import BuildLogs
from util import analytics

from test.teststorage import FakeStorage, FakeUserfiles
from test import analytics as fake_analytics


LOG_FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - ' + \
             '%(funcName)s - %(message)s'


class FlaskConfig(object):
  SECRET_KEY = '1cb18882-6d12-440d-a4cc-b7430fb5f884'

@@ -89,6 +87,10 @@ class S3Userfiles(AWSCredentials):
                                 AWSCredentials.REGISTRY_S3_BUCKET)


class RedisBuildLogs(object):
  BUILDLOGS = BuildLogs('logs.quay.io')


class StripeTestConfig(object):
  STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
  STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'

@@ -138,12 +140,22 @@ class BuildNodeConfig(object):
  BUILD_NODE_PULL_TOKEN = 'F02O2E86CQLKZUQ0O81J8XDHQ6F0N1V36L9JTOEEK6GKKMT1GI8PTJQT4OU88Y6G'


def logs_init_builder(level=logging.DEBUG):
  @staticmethod
  def init_logs():
    handler = logging.StreamHandler()
    root_logger = logging.getLogger('')
    root_logger.setLevel(level)
    formatter = logstash_formatter.LogstashFormatter()
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

  return init_logs


class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
                 FakeAnalytics, StripeTestConfig):
  LOGGING_CONFIG = {
    'level': logging.WARN,
    'format': LOG_FORMAT
  }
                 FakeAnalytics, StripeTestConfig, RedisBuildLogs):
  LOGGING_CONFIG = logs_init_builder(logging.WARN)
  POPULATE_DB_TEST_DATA = True
  TESTING = True
  INCLUDE_TEST_ENDPOINTS = True

@@ -151,11 +163,9 @@ class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,

class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                  StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
                  DigitalOceanConfig, BuildNodeConfig, S3Userfiles):
  LOGGING_CONFIG = {
    'level': logging.DEBUG,
    'format': LOG_FORMAT
  }
                  DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
                  RedisBuildLogs):
  LOGGING_CONFIG = logs_init_builder()
  SEND_FILE_MAX_AGE_DEFAULT = 0
  POPULATE_DB_TEST_DATA = True
  INCLUDE_TEST_ENDPOINTS = True

@@ -164,22 +174,15 @@ class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,

class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelTestConfig,
                        GitHubProdConfig, DigitalOceanConfig,
                        BuildNodeConfig, S3Userfiles):
  LOGGING_CONFIG = {
    'level': logging.DEBUG,
    'format': LOG_FORMAT
  }
                        BuildNodeConfig, S3Userfiles, RedisBuildLogs):
  LOGGING_CONFIG = logs_init_builder()
  SEND_FILE_MAX_AGE_DEFAULT = 0


class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
                       StripeLiveConfig, MixpanelProdConfig,
                       GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
                       S3Userfiles):
  LOGGING_CONFIG = {
    'stream': sys.stderr,
    'level': logging.DEBUG,
    'format': LOG_FORMAT,
    'filename': 'application.log',
  }
                       S3Userfiles, RedisBuildLogs):

  LOGGING_CONFIG = logs_init_builder()
  SEND_FILE_MAX_AGE_DEFAULT = 0
data/buildlogs.py (new file, 56 lines)

@@ -0,0 +1,56 @@
import redis
import json


class BuildLogs(object):
  def __init__(self, redis_host):
    self._redis = redis.StrictRedis(host=redis_host)

  @staticmethod
  def _logs_key(build_id):
    return 'builds/%s/logs' % build_id

  def append_log_entry(self, build_id, log_obj):
    """
    Appends the serialized form of log_obj to the end of the log entry list
    and returns the new length of the list.
    """
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def append_log_message(self, build_id, log_message):
    """
    Wraps the message in an envelope and push it to the end of the log entry
    list and returns the new length of the list.
    """
    log_obj = {
      'message': log_message
    }
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def get_log_entries(self, build_id, start_index, end_index):
    """
    Returns a tuple of the current length of the list and an iterable of the
    requested log entries. End index is inclusive.
    """
    llen = self._redis.llen(self._logs_key(build_id))
    log_entries = self._redis.lrange(self._logs_key(build_id), start_index,
                                     end_index)
    return (llen, (json.loads(entry) for entry in log_entries))

  @staticmethod
  def _status_key(build_id):
    return 'builds/%s/status' % build_id

  def set_status(self, build_id, status_obj):
    """
    Sets the status key for this build to json serialized form of the supplied
    obj.
    """
    self._redis.set(self._status_key(build_id), json.dumps(status_obj))

  def get_status(self, build_id):
    """
    Loads the status information for the specified build id.
    """
    fetched = self._redis.get(self._status_key(build_id))
    return json.loads(fetched) if fetched else None
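BuildLogs keeps two Redis keys per build (builds/&lt;id&gt;/logs and builds/&lt;id&gt;/status), so a worker can append entries while the API polls. A minimal usage sketch of the class above; the Redis host and build id are placeholders, not values from this commit (production points at logs.quay.io via RedisBuildLogs in config.py):

```python
from data.buildlogs import BuildLogs

logs = BuildLogs('localhost')  # assumption: a locally reachable Redis

build_id = 'example-build-uuid'  # hypothetical id, for illustration only
logs.set_status(build_id, {'phase': 'building'})
logs.append_log_message(build_id, 'Step 1 : FROM ubuntu')

# End index is inclusive; 0..-1 fetches the entire list.
count, entries = logs.get_log_entries(build_id, 0, -1)
for entry in entries:
  print entry['message']
```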
@@ -1,5 +1,6 @@
import string
import logging
import uuid

from random import SystemRandom
from datetime import datetime

@@ -12,16 +13,6 @@ logger = logging.getLogger(__name__)
db = app.config['DB_DRIVER'](app.config['DB_NAME'],
                             **app.config['DB_CONNECTION_ARGS'])


def close_db(exc):
  if not db.is_closed():
    logger.debug('Disconnecting from database.')
    db.close()


app.teardown_request(close_db)


def random_string_generator(length=16):
  def random_string():
    random = SystemRandom()

@@ -30,6 +21,10 @@ def random_string_generator(length=16):
  return random_string


def uuid_generator():
  return str(uuid.uuid4())


class BaseModel(Model):
  class Meta:
    database = db

@@ -135,7 +130,7 @@ class RepositoryPermission(BaseModel):

class PermissionPrototype(BaseModel):
  org = ForeignKeyField(User, index=True, related_name='orgpermissionproto')
  uuid = CharField()
  uuid = CharField(default=uuid_generator)
  activating_user = ForeignKeyField(User, index=True, null=True,
                                    related_name='userpermissionproto')
  delegate_user = ForeignKeyField(User, related_name='receivingpermission',

@@ -214,13 +209,12 @@ class RepositoryTag(BaseModel):


class RepositoryBuild(BaseModel):
  repository = ForeignKeyField(Repository)
  uuid = CharField(default=uuid_generator, index=True)
  repository = ForeignKeyField(Repository, index=True)
  access_token = ForeignKeyField(AccessToken)
  resource_key = CharField()
  tag = CharField()
  build_node_id = IntegerField(null=True)
  phase = CharField(default='waiting')
  status_url = CharField(null=True)


class QueueItem(BaseModel):
@@ -4,9 +4,7 @@ import datetime
import dateutil.parser
import operator
import json
import uuid

from datetime import timedelta

from database import *
from util.validation import *

@@ -728,8 +726,7 @@ def update_prototype_permission(org, uid, role_name):
def add_prototype_permission(org, role_name, activating_user,
                             delegate_user=None, delegate_team=None):
  new_role = Role.get(Role.name == role_name)
  uid = str(uuid.uuid4())
  return PermissionPrototype.create(org=org, uuid=uid, role=new_role,
  return PermissionPrototype.create(org=org, role=new_role,
                                    activating_user=activating_user,
                                    delegate_user=delegate_user, delegate_team=delegate_team)

@@ -1248,13 +1245,18 @@ def load_token_data(code):
  raise InvalidTokenException('Invalid delegate token code: %s' % code)


def get_repository_build(request_dbid):
  try:
    return RepositoryBuild.get(RepositoryBuild.id == request_dbid)
  except RepositoryBuild.DoesNotExist:
    msg = 'Unable to locate a build by id: %s' % request_dbid
def get_repository_build(namespace_name, repository_name, build_uuid):
  joined = RepositoryBuild.select().join(Repository)
  fetched = list(joined.where(Repository.name == repository_name,
                              Repository.namespace == namespace_name,
                              RepositoryBuild.uuid == build_uuid))

  if not fetched:
    msg = 'Unable to locate a build by id: %s' % build_uuid
    raise InvalidRepositoryBuildException(msg)

  return fetched[0]


def list_repository_builds(namespace_name, repository_name,
                           include_inactive=True):
endpoints/api.py (213 changes)

@@ -31,12 +31,14 @@ from datetime import datetime, timedelta

store = app.config['STORAGE']
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
logger = logging.getLogger(__name__)

route_data = None

api = Blueprint('api', __name__)


@api.before_request
def csrf_protect():
  if request.method != "GET" and request.method != "HEAD":

@@ -45,7 +47,19 @@ def csrf_protect():

    # TODO: add if not token here, once we are sure all sessions have a token.
    if token != found_token:
      abort(403)
      msg = 'CSRF Failure. Session token was %s and request token was %s'
      logger.error(msg, token, found_token)

    if not token:
      logger.warning('No CSRF token in session.')


def request_error(exception=None, **kwargs):
  data = kwargs.copy()
  if exception:
    data['message'] = exception.message

  return make_response(jsonify(data), 400)


def get_route_data():

@@ -132,7 +146,7 @@ def discovery():
@api.route('/')
@internal_api_call
def welcome():
  return make_response('welcome', 200)
  return jsonify({'version': '0.5'})


@api.route('/plans/')

@@ -222,20 +236,14 @@ def convert_user_to_organization():
  # Ensure that the new admin user is the not user being converted.
  admin_username = convert_data['adminUser']
  if admin_username == user.username:
    error_resp = jsonify({
      'reason': 'invaliduser'
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(reason='invaliduser',
                         message='The admin user is not valid')

  # Ensure that the sign in credentials work.
  admin_password = convert_data['adminPassword']
  if not model.verify_user(admin_username, admin_password):
    error_resp = jsonify({
      'reason': 'invaliduser'
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(reason='invaliduser',
                         message='The admin user credentials are not valid')

  # Subscribe the organization to the new plan.
  plan = convert_data['plan']

@@ -271,22 +279,15 @@ def change_user_details():
    new_email = user_data['email']
    if model.find_user_by_email(new_email):
      # Email already used.
      error_resp = jsonify({
        'message': 'E-mail address already used'
      })
      error_resp.status_code = 400
      return error_resp
      return request_error(message='E-mail address already used')

    logger.debug('Sending email to change email address for user: %s', user.username)
    logger.debug('Sending email to change email address for user: %s',
                 user.username)
    code = model.create_confirm_email_code(user, new_email=new_email)
    send_change_email(user.username, user_data['email'], code.code)

  except model.InvalidPasswordException, ex:
    error_resp = jsonify({
      'message': ex.message,
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(exception=ex)

  return jsonify(user_view(user))

@@ -298,11 +299,7 @@ def create_new_user():

  existing_user = model.get_user(user_data['username'])
  if existing_user:
    error_resp = jsonify({
      'message': 'The username already exists'
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(message='The username already exists')

  try:
    new_user = model.create_user(user_data['username'], user_data['password'],

@@ -311,11 +308,7 @@ def create_new_user():
    send_confirmation_email(new_user.username, new_user.email, code.code)
    return make_response('Created', 201)
  except model.DataModelException as ex:
    error_resp = jsonify({
      'message': ex.message,
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(exception=ex)


@api.route('/signin', methods=['POST'])

@@ -336,7 +329,7 @@ def conduct_signin(username_or_email, password):
  verified = model.verify_user(username_or_email, password)
  if verified:
    if common_login(verified):
      return make_response('Success', 200)
      return jsonify({'success': True})
    else:
      needs_email_verification = True

@@ -357,7 +350,7 @@ def conduct_signin(username_or_email, password):
def logout():
  logout_user()
  identity_changed.send(app, identity=AnonymousIdentity())
  return make_response('Success', 200)
  return jsonify({'success': True})


@api.route("/recovery", methods=['POST'])

@@ -459,22 +452,15 @@ def create_organization():
    pass

  if existing:
    error_resp = jsonify({
      'message': 'A user or organization with this name already exists'
    })
    error_resp.status_code = 400
    return error_resp
    msg = 'A user or organization with this name already exists'
    return request_error(message=msg)

  try:
    model.create_organization(org_data['name'], org_data['email'],
                              current_user.db_user())
    return make_response('Created', 201)
  except model.DataModelException as ex:
    error_resp = jsonify({
      'message': ex.message,
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(exception=ex)


def org_view(o, teams):

@@ -529,12 +515,7 @@ def change_organization_details(orgname):
  if 'email' in org_data and org_data['email'] != org.email:
    new_email = org_data['email']
    if model.find_user_by_email(new_email):
      # Email already used.
      error_resp = jsonify({
        'message': 'E-mail address already used'
      })
      error_resp.status_code = 400
      return error_resp
      return request_error(message='E-mail address already used')

    logger.debug('Changing email address for organization: %s', org.username)
    model.update_email(org, new_email)

@@ -568,7 +549,7 @@ def prototype_view(proto, org_members):
    'id': proto.uuid,
  }

@api.route('/api/organization/<orgname>/prototypes', methods=['GET'])
@api.route('/organization/<orgname>/prototypes', methods=['GET'])
@api_login_required
def get_organization_prototype_permissions(orgname):
  permission = AdministerOrganizationPermission(orgname)

@@ -606,7 +587,7 @@ def log_prototype_action(action_kind, orgname, prototype, **kwargs):
  log_action(action_kind, orgname, log_params)


@api.route('/api/organization/<orgname>/prototypes', methods=['POST'])
@api.route('/organization/<orgname>/prototypes', methods=['POST'])
@api_login_required
def create_organization_prototype_permission(orgname):
  permission = AdministerOrganizationPermission(orgname)

@@ -619,7 +600,8 @@ def create_organization_prototype_permission(orgname):
  details = request.get_json()
  activating_username = None

  if 'activating_user' in details and details['activating_user'] and 'name' in details['activating_user']:
  if ('activating_user' in details and details['activating_user'] and
      'name' in details['activating_user']):
    activating_username = details['activating_user']['name']

  delegate = details['delegate']

@@ -637,10 +619,10 @@ def create_organization_prototype_permission(orgname):
                   if delegate_teamname else None)

  if activating_username and not activating_user:
    abort(404)
    return request_error(message='Unknown activating user')

  if not delegate_user and not delegate_team:
    abort(400)
    return request_error(message='Missing delegate user or team')

  role_name = details['role']

@@ -653,7 +635,7 @@ def create_organization_prototype_permission(orgname):
  abort(403)


@api.route('/api/organization/<orgname>/prototypes/<prototypeid>',
@api.route('/organization/<orgname>/prototypes/<prototypeid>',
           methods=['DELETE'])
@api_login_required
def delete_organization_prototype_permission(orgname, prototypeid):

@@ -675,7 +657,7 @@ def delete_organization_prototype_permission(orgname, prototypeid):
  abort(403)


@api.route('/api/organization/<orgname>/prototypes/<prototypeid>',
@api.route('/organization/<orgname>/prototypes/<prototypeid>',
           methods=['PUT'])
@api_login_required
def update_organization_prototype_permission(orgname, prototypeid):

@@ -898,7 +880,7 @@ def update_organization_team_member(orgname, teamname, membername):
  # Find the user.
  user = model.get_user(membername)
  if not user:
    abort(400)
    return request_error(message='Unknown user')

  # Add the user to the team.
  model.add_user_to_team(user, team)

@@ -939,7 +921,7 @@ def create_repo():

  existing = model.get_repository(namespace_name, repository_name)
  if existing:
    return make_response('Repository already exists', 400)
    return request_error(message='Repository already exists')

  visibility = req['visibility']

@@ -1012,7 +994,7 @@ def list_repos():
  if page:
    try:
      page = int(page)
    except:
    except Exception:
      page = None

  username = None

@@ -1160,35 +1142,65 @@ def get_repo(namespace, repository):
  abort(403)  # Permission denied


def build_status_view(build_obj):
  status = build_logs.get_status(build_obj.uuid)
  return {
    'id': build_obj.uuid,
    'phase': build_obj.phase,
    'status': status,
  }


@api.route('/repository/<path:repository>/build/', methods=['GET'])
@parse_repository_name
def get_repo_builds(namespace, repository):
  permission = ReadRepositoryPermission(namespace, repository)
  is_public = model.repository_is_public(namespace, repository)
  if permission.can() or is_public:
    def build_view(build_obj):
      # TODO(jake): Filter these logs if the current user can only *read* the repo.
      if build_obj.status_url:
        # Delegate the status to the build node
        node_status = requests.get(build_obj.status_url).json()
        node_status['id'] = build_obj.id
        return node_status

      # If there was no status url, do the best we can
      # The format of this block should mirror that of the buildserver.
      return {
        'id': build_obj.id,
        'total_commands': None,
        'current_command': None,
        'push_completion': 0.0,
        'status': build_obj.phase,
        'message': None,
        'image_completion': {},
      }

    builds = model.list_repository_builds(namespace, repository)
    return jsonify({
      'builds': [build_view(build) for build in builds]
      'builds': [build_status_view(build) for build in builds]
    })

  abort(403)  # Permission denied


@api.route('/repository/<path:repository>/build/<build_uuid>/status',
           methods=['GET'])
@parse_repository_name
def get_repo_build_status(namespace, repository, build_uuid):
  permission = ReadRepositoryPermission(namespace, repository)
  is_public = model.repository_is_public(namespace, repository)
  if permission.can() or is_public:
    build = model.get_repository_build(namespace, repository, build_uuid)
    return jsonify(build_status_view(build))

  abort(403)  # Permission denied


@api.route('/repository/<path:repository>/build/<build_uuid>/logs',
           methods=['GET'])
@parse_repository_name
def get_repo_build_logs(namespace, repository, build_uuid):
  permission = ModifyRepositoryPermission(namespace, repository)
  if permission.can():
    build = model.get_repository_build(namespace, repository, build_uuid)

    start = int(request.args.get('start', -1000))
    end = int(request.args.get('end', -1))
    count, logs = build_logs.get_log_entries(build.uuid, start, end)

    if start < 0:
      start = max(0, count + start)

    if end < 0:
      end = count + end

    return jsonify({
      'start': start,
      'end': end,
      'total': count,
      'logs': [log for log in logs],
    })

  abort(403)  # Permission denied
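With the api blueprint mounted at /api (see application.py above), the new status and logs endpoints can be exercised over HTTP with requests, which api.py already imports. A sketch under assumptions: the host, repository, and build UUID are placeholders, and the logs endpoint additionally needs an authenticated session with modify permission, omitted here:

```python
import requests

# Placeholders for illustration; not values from this commit.
base = 'https://quay.io/api/repository/myorg/myrepo/build'
build_uuid = 'some-build-uuid'

status = requests.get('%s/%s/status' % (base, build_uuid)).json()
print status['phase']

# start/end mirror BuildLogs.get_log_entries(); negatives count from the tail.
resp = requests.get('%s/%s/logs' % (base, build_uuid),
                    params={'start': -100, 'end': -1})
for entry in resp.json()['logs']:
  print entry['message']
```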
@@ -1210,15 +1222,21 @@ def request_repo_build(namespace, repository):
  tag = '%s/%s/%s' % (host, repo.namespace, repo.name)
  build_request = model.create_repository_build(repo, token, dockerfile_id,
                                                tag)
  dockerfile_build_queue.put(json.dumps({'build_id': build_request.id}))
  dockerfile_build_queue.put(json.dumps({
    'build_uuid': build_request.uuid,
    'namespace': namespace,
    'repository': repository,
  }))

  log_action('build_dockerfile', namespace,
             {'repo': repository, 'namespace': namespace,
              'fileid': dockerfile_id}, repo=repo)

  resp = jsonify({
    'started': True
  })
  resp = jsonify(build_status_view(build_request))
  repo_string = '%s/%s' % (namespace, repository)
  resp.headers['Location'] = url_for('api.get_repo_build_status',
                                     repository=repo_string,
                                     build_uuid=build_request.uuid)
  resp.status_code = 201
  return resp

@@ -1242,7 +1260,8 @@ def create_webhook(namespace, repository):
  webhook = model.create_webhook(repo, request.get_json())
  resp = jsonify(webhook_view(webhook))
  repo_string = '%s/%s' % (namespace, repository)
  resp.headers['Location'] = url_for('get_webhook', repository=repo_string,
  resp.headers['Location'] = url_for('api.get_webhook',
                                     repository=repo_string,
                                     public_id=webhook.public_id)
  log_action('add_repo_webhook', namespace,
             {'repo': repository, 'webhook_id': webhook.public_id},

@@ -1379,7 +1398,7 @@ def get_image_changes(namespace, repository, image_id):
  abort(403)


@api.route('/api/repository/<path:repository>/tag/<tag>',
@api.route('/repository/<path:repository>/tag/<tag>',
           methods=['DELETE'])
@parse_repository_name
def delete_full_tag(namespace, repository, tag):

@@ -1543,11 +1562,7 @@ def change_user_permissions(namespace, repository, username):
    # This repository is not part of an organization
    pass
  except model.DataModelException as ex:
    error_resp = jsonify({
      'message': ex.message,
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(exception=ex)

  log_action('change_repo_permission', namespace,
             {'username': username, 'repo': repository,

@@ -1600,11 +1615,7 @@ def delete_user_permissions(namespace, repository, username):
  try:
    model.delete_user_permission(username, namespace, repository)
  except model.DataModelException as ex:
    error_resp = jsonify({
      'message': ex.message,
    })
    error_resp.status_code = 400
    return error_resp
    return request_error(exception=ex)

  log_action('delete_repo_permission', namespace,
             {'username': username, 'repo': repository},

@@ -1860,7 +1871,7 @@ def subscribe(user, plan, token, require_business_plan):
      plan_found['price'] == 0):
    logger.warning('Business attempting to subscribe to personal plan: %s',
                   user.username)
    abort(400)
    return request_error(message='No matching plan found')

  private_repos = model.get_private_repo_count(user.username)

@@ -2090,7 +2101,7 @@ def delete_user_robot(robot_shortname):
  parent = current_user.db_user()
  model.delete_robot(format_robot_username(parent.username, robot_shortname))
  log_action('delete_robot', parent.username, {'robot': robot_shortname})
  return make_response('No Content', 204)
  return make_response('Deleted', 204)


@api.route('/organization/<orgname>/robots/<robot_shortname>',

@@ -2102,7 +2113,7 @@ def delete_org_robot(orgname, robot_shortname):
  if permission.can():
    model.delete_robot(format_robot_username(orgname, robot_shortname))
    log_action('delete_robot', orgname, {'robot': robot_shortname})
    return make_response('No Content', 204)
    return make_response('Deleted', 204)

  abort(403)
@@ -2,7 +2,7 @@ import logging
import os
import base64

from flask import request, abort, session
from flask import request, abort, session, make_response
from flask.ext.login import login_user, UserMixin
from flask.ext.principal import identity_changed
@@ -2,12 +2,12 @@ import json
import logging
import urlparse

from flask import request, make_response, jsonify, abort, session, Blueprint
from flask import request, make_response, jsonify, session, Blueprint
from functools import wraps

from data import model
from data.queue import webhook_queue
from app import app, mixpanel
from app import mixpanel
from auth.auth import (process_auth, get_authenticated_user,
                       get_validated_token)
from util.names import parse_repository_name

@@ -16,6 +16,9 @@ from auth.permissions import (ModifyRepositoryPermission, UserPermission,
                              ReadRepositoryPermission,
                              CreateRepositoryPermission)

from util.http import abort


logger = logging.getLogger(__name__)

index = Blueprint('index', __name__)

@@ -64,14 +67,14 @@ def create_user():
      model.load_token_data(password)
      return make_response('Verified', 201)
    except model.InvalidTokenException:
      return make_response('Invalid access token.', 400)
      abort(400, 'Invalid access token.', issue='invalid-access-token')

  elif '+' in username:
    try:
      model.verify_robot(username, password)
      return make_response('Verified', 201)
    except model.InvalidRobotException:
      return make_response('Invalid robot account or password.', 400)
      abort(400, 'Invalid robot account or password.', issue='robot-login-failure')

  existing_user = model.get_user(username)
  if existing_user:

@@ -79,7 +82,8 @@ def create_user():
    if verified:
      return make_response('Verified', 201)
    else:
      return make_response('Invalid password.', 400)
      abort(400, 'Invalid password.', issue='login-failure')

  else:
    # New user case
    new_user = model.create_user(username, password, user_data['email'])

@@ -131,23 +135,30 @@ def update_user(username):
@generate_headers(role='write')
def create_repository(namespace, repository):
  image_descriptions = json.loads(request.data)

  repo = model.get_repository(namespace, repository)

  if not repo and get_authenticated_user() is None:
    logger.debug('Attempt to create new repository without user auth.')
    abort(401)
    abort(401,
          message='Cannot create a repository as a guest. Please login via "docker login" first.',
          issue='no-login')

  elif repo:
    permission = ModifyRepositoryPermission(namespace, repository)
    if not permission.can():
      abort(403)
      abort(403,
            message='You do not have permission to modify repository %(namespace)s/%(repository)s',
            issue='no-repo-write-permission',
            namespace=namespace, repository=repository)

  else:
    permission = CreateRepositoryPermission(namespace)
    if not permission.can():
      logger.info('Attempt to create a new repo with insufficient perms.')
      abort(403)
      abort(403,
            message='You do not have permission to create repositories in namespace "%(namespace)s"',
            issue='no-create-permission',
            namespace=namespace)
logger.debug('Creaing repository with owner: %s' %
|
||||
get_authenticated_user().username)
|
||||
|
@ -200,7 +211,7 @@ def update_images(namespace, repository):
|
|||
repo = model.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
# Make sure the repo actually exists.
|
||||
abort(404)
|
||||
abort(404, message='Unknown repository', issue='unknown-repo')
|
||||
|
||||
image_with_checksums = json.loads(request.data)
|
||||
|
||||
|
@ -248,7 +259,7 @@ def get_repository_images(namespace, repository):
|
|||
# We can't rely on permissions to tell us if a repo exists anymore
|
||||
repo = model.get_repository(namespace, repository)
|
||||
if not repo:
|
||||
abort(404)
|
||||
abort(404, message='Unknown repository', issue='unknown-repo')
|
||||
|
||||
all_images = []
|
||||
for image in model.get_repository_images(namespace, repository):
|
||||
|
@@ -296,18 +307,18 @@ def get_repository_images(namespace, repository):
@parse_repository_name
@generate_headers(role='write')
def delete_repository_images(namespace, repository):
  return make_response('Not Implemented', 501)
  abort(501, 'Not Implemented', issue='not-implemented')


@index.route('/repositories/<path:repository>/auth', methods=['PUT'])
@parse_repository_name
def put_repository_auth(namespace, repository):
  return make_response('Not Implemented', 501)
  abort(501, 'Not Implemented', issue='not-implemented')


@index.route('/search', methods=['GET'])
def get_search():
  return make_response('Not Implemented', 501)
  abort(501, 'Not Implemented', issue='not-implemented')


@index.route('/_ping')
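Note: every `abort(status, message, issue=...)` call introduced in this file routes through the new `util.http.abort` helper, which is not itself shown in this diff. A minimal sketch of what such a helper could look like, assuming it fills the `%(name)s` placeholders from the keyword arguments and reports the `issue` code in a JSON error body — the real implementation may differ:

```python
import logging

from flask import abort as flask_abort, jsonify, make_response

logger = logging.getLogger(__name__)


def abort(status_code, message=None, issue=None, **kwargs):
  # Assumed behavior: fill the %(name)s-style placeholders from kwargs.
  error_message = (message or '') % kwargs

  # Log server-side before handing the error back to the client.
  logger.error('HTTP %s: %s (issue: %s)', status_code, error_message, issue)

  body = {'message': error_message}
  if issue is not None:
    body['issue'] = issue

  # Passing a fully-formed response to flask.abort preserves the JSON body.
  flask_abort(make_response(jsonify(body), status_code))
```

Centralizing errors this way lets the `issue` codes double as stable, machine-readable identifiers for clients and docs, independent of message wording.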
@@ -1,8 +1,8 @@
import logging
import json

from flask import (make_response, request, session, Response, abort,
                   redirect, Blueprint)
from flask import (make_response, request, session, Response, redirect,
                   Blueprint, abort as flask_abort)
from functools import wraps
from datetime import datetime
from time import time
@@ -12,6 +12,7 @@ from data.queue import image_diff_queue
from app import app
from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes
from util.http import abort
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
from data import model
@@ -45,8 +46,9 @@ def require_completion(f):
  def wrapper(namespace, repository, *args, **kwargs):
    if store.exists(store.image_mark_path(namespace, repository,
                                          kwargs['image_id'])):
      logger.warning('Image is already being uploaded: %s', kwargs['image_id'])
      abort(400) # 'Image is being uploaded, retry later')
      abort(400, 'Image %(image_id)s is being uploaded, retry later',
            issue='upload-in-progress', image_id=kwargs['image_id'])

    return f(namespace, repository, *args, **kwargs)
  return wrapper
@@ -90,9 +92,8 @@ def get_image_layer(namespace, repository, image_id, headers):
    try:
      return Response(store.stream_read(path), headers=headers)
    except IOError:
      logger.warning('Image not found: %s', image_id)
      abort(404) # 'Image not found', 404)
      abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

  abort(403)
@@ -108,16 +109,20 @@ def put_image_layer(namespace, repository, image_id):
    json_data = store.get_content(store.image_json_path(namespace, repository,
                                                        image_id))
  except IOError:
    abort(404) # 'Image not found', 404)
    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

  layer_path = store.image_layer_path(namespace, repository, image_id)
  mark_path = store.image_mark_path(namespace, repository, image_id)

  if store.exists(layer_path) and not store.exists(mark_path):
    abort(409) # 'Image already exists', 409)
    abort(409, 'Image already exists', issue='image-exists', image_id=image_id)

  input_stream = request.stream
  if request.headers.get('transfer-encoding') == 'chunked':
    # Careful, might work only with WSGI servers supporting chunked
    # encoding (Gunicorn)
    input_stream = request.environ['wsgi.input']

  # compute checksums
  csums = []
  sr = SocketReader(input_stream)
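The chunked-encoding branch above matters because `request.stream` requires a Content-Length, which chunked uploads do not carry; falling back to the raw WSGI input keeps streaming pushes working under Gunicorn. A standalone sketch of the same pattern — the hashing here stands in for the SocketReader handler machinery and is illustrative only:

```python
import hashlib

from flask import request


def read_and_hash_upload():
  # Chunked uploads carry no Content-Length, so request.stream can be
  # empty; read the raw WSGI input instead. This relies on the WSGI
  # server (e.g. Gunicorn) supporting chunked request bodies.
  input_stream = request.stream
  if request.headers.get('transfer-encoding') == 'chunked':
    input_stream = request.environ['wsgi.input']

  # Hash while streaming so the layer is never buffered fully in memory.
  h = hashlib.sha256()
  while True:
    chunk = input_stream.read(64 * 1024)
    if not chunk:
      break
    h.update(chunk)
  return 'sha256:' + h.hexdigest()
```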
@@ -127,6 +132,7 @@ def put_image_layer(namespace, repository, image_id):
  sr.add_handler(sum_hndlr)
  store.stream_write(layer_path, sr)
  csums.append('sha256:{0}'.format(h.hexdigest()))

  try:
    image_size = tmp.tell()
@@ -139,6 +145,7 @@ def put_image_layer(namespace, repository, image_id):
  except (IOError, checksums.TarError) as e:
    logger.debug('put_image_layer: Error when computing tarsum '
                 '{0}'.format(e))

  try:
    checksum = store.get_content(store.image_checksum_path(namespace,
                                                           repository,
@@ -148,10 +155,13 @@ def put_image_layer(namespace, repository, image_id):
    # Not removing the mark though, image is not downloadable yet.
    session['checksum'] = csums
    return make_response('true', 200)

  # We check if the checksum provided matches the one we computed
  if checksum not in csums:
    logger.warning('put_image_layer: Wrong checksum')
    abort(400) # 'Checksum mismatch, ignoring the layer')
    abort(400, 'Checksum mismatch; ignoring the layer for image %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # Checksum is ok, we remove the marker
  store.remove(mark_path)
@@ -177,24 +187,31 @@ def put_image_checksum(namespace, repository, image_id):

  checksum = request.headers.get('X-Docker-Checksum')
  if not checksum:
    logger.warning('Missing Image\'s checksum: %s', image_id)
    abort(400) # 'Missing Image\'s checksum')
    abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum', image_id=image_id)

  if not session.get('checksum'):
    logger.warning('Checksum not found in Cookie for image: %s', image_id)
    abort(400) # 'Checksum not found in Cookie')
    abort(400, 'Checksum not found in Cookie for image %(image_id)s',
          issue='missing-checksum-cookie', image_id=image_id)

  if not store.exists(store.image_json_path(namespace, repository, image_id)):
    abort(404) # 'Image not found', 404)
    abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

  mark_path = store.image_mark_path(namespace, repository, image_id)
  if not store.exists(mark_path):
    abort(409) # 'Cannot set this image checksum', 409)
    abort(409, 'Cannot set checksum for image %(image_id)s',
          issue='image-write-error', image_id=image_id)

  err = store_checksum(namespace, repository, image_id, checksum)
  if err:
    abort(err)
    abort(400, err)

  if checksum not in session.get('checksum', []):
    logger.debug('session checksums: %s' % session.get('checksum', []))
    logger.debug('client supplied checksum: %s' % checksum)
    logger.debug('put_image_layer: Wrong checksum')
    abort(400) # 'Checksum mismatch')
    abort(400, 'Checksum mismatch for image: %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # Checksum is ok, we remove the marker
  store.remove(mark_path)
@@ -225,16 +242,19 @@ def get_image_json(namespace, repository, image_id, headers):
    data = store.get_content(store.image_json_path(namespace, repository,
                                                   image_id))
  except IOError:
    abort(404) # 'Image not found', 404)
    flask_abort(404)

  try:
    size = store.get_size(store.image_layer_path(namespace, repository,
                                                 image_id))
    headers['X-Docker-Size'] = str(size)
  except OSError:
    pass

  checksum_path = store.image_checksum_path(namespace, repository, image_id)
  if store.exists(checksum_path):
    headers['X-Docker-Checksum'] = store.get_content(checksum_path)

  response = make_response(data, 200)
  response.headers.extend(headers)
  return response
@@ -255,7 +275,8 @@ def get_image_ancestry(namespace, repository, image_id, headers):
    data = store.get_content(store.image_ancestry_path(namespace, repository,
                                                       image_id))
  except IOError:
    abort(404) # 'Image not found', 404)
    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

  response = make_response(json.dumps(json.loads(data)), 200)
  response.headers.extend(headers)
  return response
@@ -280,6 +301,7 @@ def store_checksum(namespace, repository, image_id, checksum):
  checksum_parts = checksum.split(':')
  if len(checksum_parts) != 2:
    return 'Invalid checksum format'

  # We store the checksum
  checksum_path = store.image_checksum_path(namespace, repository, image_id)
  store.put_content(checksum_path, checksum)
@@ -298,36 +320,39 @@ def put_image_json(namespace, repository, image_id):
  except json.JSONDecodeError:
    pass
  if not data or not isinstance(data, dict):
    logger.warning('Invalid JSON for image: %s json: %s', image_id,
                   request.data)
    abort(400) # 'Invalid JSON')
    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
          issue='invalid-request', image_id=image_id, json=request.data)

  if 'id' not in data:
    logger.warning('Missing key `id\' in JSON for image: %s', image_id)
    abort(400) # 'Missing key `id\' in JSON')
    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  # Read the checksum
  checksum = request.headers.get('X-Docker-Checksum')
  if checksum:
    # Storing the checksum is optional at this stage
    err = store_checksum(namespace, repository, image_id, checksum)
    if err:
      abort(err)
      abort(400, err, issue='write-error')

  else:
    # We cleanup any old checksum in case it's a retry after a fail
    store.remove(store.image_checksum_path(namespace, repository, image_id))
  if image_id != data['id']:
    logger.warning('JSON data contains invalid id for image: %s', image_id)
    abort(400) # 'JSON data contains invalid id')
    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  parent_id = data.get('parent')
  if parent_id and not store.exists(store.image_json_path(namespace,
                                                          repository,
                                                          data['parent'])):
    logger.warning('Image depends on a non existing parent image: %s',
                   image_id)
    abort(400) # 'Image depends on a non existing parent')
  if (parent_id and not
      store.exists(store.image_json_path(namespace, repository, parent_id))):
    abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
          issue='invalid-request', image_id=image_id, parent_id=parent_id)

  json_path = store.image_json_path(namespace, repository, image_id)
  mark_path = store.image_mark_path(namespace, repository, image_id)
  if store.exists(json_path) and not store.exists(mark_path):
    abort(409) # 'Image already exists', 409)
    abort(409, 'Image already exists', issue='image-exists', image_id=image_id)

  # If we reach that point, it means that this is a new image or a retry
  # on a failed push
  # save the metadata
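The `len(checksum_parts) != 2` guard in `store_checksum` above encodes the expected checksum shape: an algorithm label and a hex digest separated by a single colon, e.g. `sha256:<hex>` or `tarsum+sha256:<hex>`. A small illustrative parser making that contract explicit (function name invented for the example):

```python
def parse_checksum(checksum):
  # Mirrors the guard in store_checksum: exactly one ':' separating the
  # algorithm label from the hex digest, e.g. 'tarsum+sha256:<hex>'.
  parts = checksum.split(':')
  if len(parts) != 2:
    raise ValueError('Invalid checksum format: %r' % checksum)
  algorithm, hexdigest = parts
  return algorithm, hexdigest


# Example of the tarsum-style checksum shape used elsewhere in this diff.
print(parse_checksum('tarsum+sha256:' + 'ab' * 32))
```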
@@ -248,7 +248,7 @@ def github_oauth_callback():
    return render_page_template('githuberror.html', error_message=ex.message)

  if common_login(to_login):
    return redirect(url_for('index'))
    return redirect(url_for('web.index'))

  return render_page_template('githuberror.html')

@@ -261,7 +261,7 @@ def github_oauth_attach():
  github_id = user_data['id']
  user_obj = current_user.db_user()
  model.attach_federated_login(user_obj, 'github', github_id)
  return redirect(url_for('user'))
  return redirect(url_for('web.user'))


@web.route('/confirm', methods=['GET'])

@@ -277,7 +277,8 @@ def confirm_email():

  common_login(user)

  return redirect(url_for('user', tab='email') if new_email else url_for('index'))
  return redirect(url_for('web.user', tab='email')
                  if new_email else url_for('web.index'))


@web.route('/recovery', methods=['GET'])

@@ -287,6 +288,6 @@ def confirm_recovery():

  if user:
    common_login(user)
    return redirect(url_for('user'))
    return redirect(url_for('web.user'))
  else:
    abort(403)
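The `url_for('index')` → `url_for('web.index')` changes follow from Flask's blueprint endpoint naming: once a view is registered on a blueprint, its endpoint is prefixed with the blueprint's name. A self-contained illustration (the app and route here are invented for the example):

```python
from flask import Flask, Blueprint, url_for

web = Blueprint('web', __name__)


@web.route('/')
def index():
  return 'home'


app = Flask(__name__)
app.register_blueprint(web)

with app.test_request_context():
  # The endpoint name is namespaced by the blueprint, so it must be
  # referenced as 'web.index'; the bare 'index' would raise a BuildError.
  print(url_for('web.index'))  # -> '/'
```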
@@ -1,5 +0,0 @@
bind = 'unix:/tmp/gunicorn.sock'
workers = 8
worker_class = 'gevent'
timeout = 2000
daemon = True
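This standalone gunicorn config is deleted in favor of the `conf/gunicorn_config.py` referenced by the updated README run instructions. That file is not part of this excerpt; assuming it keeps these settings and adds the `/mnt/logs` pid file the README now sends HUP to, it would look roughly like:

```python
bind = 'unix:/tmp/gunicorn.sock'
workers = 8
worker_class = 'gevent'
timeout = 2000
daemon = True

# Assumed additions matching the /mnt/logs layout in the README;
# the actual conf/gunicorn_config.py is not shown in this diff.
pidfile = '/mnt/logs/gunicorn.pid'
accesslog = '/mnt/logs/gunicorn.access.log'
errorlog = '/mnt/logs/gunicorn.error.log'
```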
44
initdb.py
@@ -4,7 +4,8 @@ import hashlib
import random

from datetime import datetime, timedelta
from peewee import SqliteDatabase, create_model_tables, drop_model_tables
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
                    savepoint_sqlite)

from data.database import *
from data import model
@@ -29,7 +30,6 @@ SAMPLE_CMDS = [["/bin/bash"],
REFERENCE_DATE = datetime(2013, 6, 23)
TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'


def __gen_checksum(image_id):
  h = hashlib.md5(image_id)
  return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()
@@ -113,6 +113,44 @@ def __generate_repository(user, name, description, is_public, permissions,
  return repo


db_initialized_for_testing = False
testcases = {}

def finished_database_for_testing(testcase):
  """ Called when a testcase has finished using the database, indicating that
      any changes should be discarded.
  """
  global testcases
  testcases[testcase]['savepoint'].__exit__(True, None, None)

def setup_database_for_testing(testcase):
  """ Called when a testcase has started using the database, indicating that
      the database should be setup (if not already) and a savepoint created.
  """

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if (not isinstance(model.db, SqliteDatabase) or
      app.config['DB_DRIVER'] is not SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  global db_initialized_for_testing
  if not db_initialized_for_testing:
    logger.debug('Setting up DB for testing.')

    # Setup the database.
    wipe_database()
    initialize_database()
    populate_database()

    db_initialized_for_testing = True

  # Create a savepoint for the testcase.
  global testcases
  testcases[testcase] = {}
  testcases[testcase]['savepoint'] = savepoint_sqlite(db)
  testcases[testcase]['savepoint'].__enter__()

def initialize_database():
  create_model_tables(all_models)
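A sketch of how a test case might consume these helpers, wrapping each test in the SQLite savepoint so its writes are rolled back; the test class and assertion are invented for illustration:

```python
import unittest

from initdb import setup_database_for_testing, finished_database_for_testing
from data import model


class TestWithIsolatedDatabase(unittest.TestCase):
  def setUp(self):
    # Builds and populates the SQLite test DB once per process, then
    # opens a savepoint scoped to this testcase.
    setup_database_for_testing(self)

  def tearDown(self):
    # Exits the savepoint as if an exception occurred, rolling back any
    # writes this test made.
    finished_database_for_testing(self)

  def test_create_user(self):
    user = model.create_user('someuser', 'password', 'user@example.com')
    self.assertEqual(user.username, 'someuser')


if __name__ == '__main__':
  unittest.main()
```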
@@ -350,7 +388,7 @@ def populate_database():
                 metadata={'token_code': 'somecode', 'repo': 'orgrepo'})

if __name__ == '__main__':
  logging.basicConfig(**app.config['LOGGING_CONFIG'])
  app.config['LOGGING_CONFIG']()
  initialize_database()

  if app.config.get('POPULATE_DB_TEST_DATA', False):
@@ -1,83 +0,0 @@
worker_processes 2;

user root nogroup;
pid /mnt/nginx/nginx.pid;
error_log /mnt/nginx/nginx.error.log;

events {
  worker_connections 1024;
  accept_mutex off;
}

http {
  types_hash_max_size 2048;
  include /usr/local/nginx/conf/mime.types.default;

  default_type application/octet-stream;
  access_log /mnt/nginx/nginx.access.log combined;
  sendfile on;

  root /root/quay/;

  gzip on;
  gzip_http_version 1.0;
  gzip_proxied any;
  gzip_min_length 500;
  gzip_disable "MSIE [1-6]\.";
  gzip_types text/plain text/xml text/css
             text/javascript application/x-javascript
             application/octet-stream;

  upstream app_server {
    server unix:/tmp/gunicorn.sock fail_timeout=0;
    # For a TCP configuration:
    # server 192.168.0.7:8000 fail_timeout=0;
  }

  server {
    listen 80 default_server;
    server_name _;
    rewrite ^ https://$host$request_uri? permanent;
  }

  server {
    listen 443 default;
    client_max_body_size 8G;
    client_body_temp_path /mnt/nginx/client_body 1 2;
    server_name _;

    keepalive_timeout 5;

    ssl on;
    ssl_certificate ./certs/quay-staging-unified.cert;
    ssl_certificate_key ./certs/quay-staging.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    if ($args ~ "_escaped_fragment_") {
      rewrite ^ /snapshot$uri;
    }

    location /static/ {
      # checks for static file, if not found proxy to app
      alias /root/quay/static/;
    }

    location / {
      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header X-Forwarded-Proto $scheme;
      proxy_set_header Host $http_host;
      proxy_redirect off;
      proxy_buffering off;

      proxy_request_buffering off;
      proxy_set_header Transfer-Encoding $http_transfer_encoding;

      proxy_pass http://app_server;
      proxy_read_timeout 2000;
      proxy_temp_path /mnt/nginx/proxy_temp 1 2;
    }
  }
}
81
nginx.conf
@@ -1,81 +0,0 @@
worker_processes 8;

user nobody nogroup;
pid /mnt/nginx/nginx.pid;
error_log /mnt/nginx/nginx.error.log;

events {
  worker_connections 1024;
  accept_mutex off;
}

http {
  types_hash_max_size 2048;
  include /usr/local/nginx/conf/mime.types.default;

  default_type application/octet-stream;
  access_log /mnt/nginx/nginx.access.log combined;
  sendfile on;

  gzip on;
  gzip_http_version 1.0;
  gzip_proxied any;
  gzip_min_length 500;
  gzip_disable "MSIE [1-6]\.";
  gzip_types text/plain text/xml text/css
             text/javascript application/x-javascript
             application/octet-stream;

  upstream app_server {
    server unix:/tmp/gunicorn.sock fail_timeout=0;
    # For a TCP configuration:
    # server 192.168.0.7:8000 fail_timeout=0;
  }

  server {
    listen 80 default_server;
    server_name _;
    rewrite ^ https://$host$request_uri? permanent;
  }

  server {
    listen 443 default;
    client_max_body_size 8G;
    client_body_temp_path /mnt/nginx/client_body 1 2;
    server_name _;

    keepalive_timeout 5;

    ssl on;
    ssl_certificate ./certs/quay-unified.cert;
    ssl_certificate_key ./certs/quay.key;
    ssl_session_timeout 5m;
    ssl_protocols SSLv3 TLSv1;
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    if ($args ~ "_escaped_fragment_") {
      rewrite ^ /snapshot$uri;
    }

    location /static/ {
      # checks for static file, if not found proxy to app
      alias /home/ubuntu/quay/static/;
    }

    location / {
      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header X-Forwarded-Proto $scheme;
      proxy_set_header Host $http_host;
      proxy_redirect off;
      proxy_buffering off;

      proxy_request_buffering off;
      proxy_set_header Transfer-Encoding $http_transfer_encoding;

      proxy_pass http://app_server;
      proxy_read_timeout 2000;
      proxy_temp_path /mnt/nginx/proxy_temp 1 2;
    }
  }
}
@@ -17,4 +17,8 @@ apscheduler
python-daemon
paramiko
python-digitalocean
xhtml2pdf
logstash_formatter
redis
hiredis
git+https://github.com/dotcloud/docker-py.git
@@ -1,9 +1,9 @@
APScheduler==2.1.1
APScheduler==2.1.2
Flask==0.10.1
Flask-Login==0.2.9
Flask-Mail==0.9.0
Flask-Principal==0.4.0
Jinja2==2.7.1
Jinja2==2.7.2
MarkupSafe==0.18
Pillow==2.3.0
PyMySQL==0.6.1

@@ -11,28 +11,34 @@ Werkzeug==0.9.4
argparse==1.2.1
beautifulsoup4==4.3.2
blinker==1.3
boto==2.21.2
boto==2.24.0
distribute==0.6.34
git+https://github.com/dotcloud/docker-py.git
ecdsa==0.10
gevent==1.0
greenlet==0.4.1
greenlet==0.4.2
gunicorn==18.0
hiredis==0.1.2
html5lib==1.0b3
itsdangerous==0.23
lockfile==0.9.1
logstash-formatter==0.5.8
marisa-trie==0.5.1
mixpanel-py==3.0.0
paramiko==1.12.0
peewee==2.1.7
mixpanel-py==3.1.1
mock==1.0.1
paramiko==1.12.1
peewee==2.2.0
py-bcrypt==0.4
pyPdf==1.13
pycrypto==2.6.1
python-daemon==1.6
python-dateutil==2.2
python-digitalocean==0.6
redis==2.9.1
reportlab==2.7
requests==2.1.0
six==1.4.1
stripe==1.11.0
requests==2.2.1
six==1.5.2
stripe==1.12.0
websocket-client==0.11.0
wsgiref==0.1.2
xhtml2pdf==0.0.5
@@ -2474,6 +2474,28 @@ p.editable:hover i {
  font-size: 16px;
}

.repo-breadcrumb-element .crumb {
  cursor: pointer;
}

.repo-breadcrumb-element .crumb:nth-last-of-type(3), .repo-breadcrumb-element .crumb:nth-last-of-type(3) a {
  color: #aaa !important;
}

.repo-breadcrumb-element .crumb:nth-last-of-type(2), .repo-breadcrumb-element .crumb:nth-last-of-type(2) a {
  color: #888 !important;
}

.repo-breadcrumb-element .crumb:after {
  content: "/";
  color: #ccc;
  margin-left: 4px;
}

.repo-breadcrumb-element .crumb:hover, .repo-breadcrumb-element .crumb:hover a {
  color: #2a6496 !important;
  text-decoration: none;
}

/* Overrides for typeahead to work with bootstrap 3. */
@@ -15,7 +15,7 @@
<div class="collapse navbar-collapse navbar-ex1-collapse">
  <ul class="nav navbar-nav">
    <li><a ng-href="/repository/" target="{{ appLinkTarget() }}">Repositories</a></li>
    <li><a ng-href="/guide/" target="{{ appLinkTarget() }}">Guide</a></li>
    <li><a href="http://docs.quay.io/">Documentation</a></li>
    <li><a ng-href="/tutorial/" target="{{ appLinkTarget() }}">Tutorial</a></li>
    <li><a ng-href="/plans/" target="{{ appLinkTarget() }}">Pricing</a></li>
    <li><a ng-href="/organizations/" target="{{ appLinkTarget() }}">Organizations</a></li>
15
static/directives/repo-breadcrumb.html
Normal file
@@ -0,0 +1,15 @@
<span class="repo-breadcrumb-element">
  <span ng-show="!image">
    <span class="crumb">
      <a href="{{ '/repository/?namespace=' + repo.namespace }}">{{repo.namespace}}</a>
    </span>
    <span class="current">{{repo.name}}</span>
  </span>
  <span ng-show="image">
    <span class="crumb">
      <a href="{{ '/repository/?namespace=' + repo.namespace }}">{{repo.namespace}}</a>
    </span>
    <span class="crumb"><a href="{{ '/repository/' + repo.namespace + '/' + repo.name }}">{{repo.name}}</a></span>
    <span class="current">{{image.id.substr(0, 12)}}</span>
  </span>
</span>
(Nine binary screenshot files also changed; sizes before → after: 70→76, 78→91, 64→65, 52→53, 53→54, 61→63, 76→76, 84→84, and 196→167 KiB.)
@@ -384,6 +384,15 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angu

      return org.is_org_admin;
    };

    userService.isKnownNamespace = function(namespace) {
      if (namespace == userResponse.username) {
        return true;
      }

      var org = userService.getOrganization(namespace);
      return !!org;
    };

    userService.currentUser = function() {
      return userResponse;

@@ -856,6 +865,24 @@ quayApp.directive('markdownView', function () {
});


quayApp.directive('repoBreadcrumb', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/repo-breadcrumb.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'repo': '=repo',
      'image': '=image'
    },
    controller: function($scope, $element) {
    }
  };
  return directiveDefinitionObject;
});


quayApp.directive('repoCircle', function () {
  var directiveDefinitionObject = {
    priority: 0,

@@ -2450,13 +2477,13 @@ quayApp.directive('buildStatus', function () {
    },
    controller: function($scope, $element) {
      $scope.getBuildProgress = function(buildInfo) {
        switch (buildInfo.status) {
        switch (buildInfo.phase) {
          case 'building':
            return (buildInfo.current_command / buildInfo.total_commands) * 100;
            return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100;
            break;

          case 'pushing':
            return buildInfo.push_completion * 100;
            return buildInfo.status.push_completion * 100;
            break;

          case 'complete':

@@ -2474,7 +2501,7 @@ quayApp.directive('buildStatus', function () {
      };

      $scope.getBuildMessage = function(buildInfo) {
        switch (buildInfo.status) {
        switch (buildInfo.phase) {
          case 'initializing':
            return 'Starting Dockerfile build';
            break;

@@ -2494,7 +2521,7 @@ quayApp.directive('buildStatus', function () {
            break;

          case 'error':
            return 'Dockerfile build failed: ' + buildInfo.message;
            return 'Dockerfile build failed.';
            break;
        }
      };
@@ -1298,7 +1298,7 @@ function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService
      $location.path('/repository/' + created.namespace + '/' + created.name);
    }, function(result) {
      $scope.creating = false;
      $scope.createError = result.data;
      $scope.createError = result.data ? result.data.message : 'Cannot create repository';
      $timeout(function() {
        $('#repoName').popover('show');
      });
@@ -1,4 +1,5 @@
<button ng-click="startTour()">Start Tour</button>
<div id="test-element">
  This is a test element
<div class="container">
  <h3>Redirecting...</h3>
  <META http-equiv="refresh" content="0;URL=http://docs.quay.io/getting-started.html">
  If this page does not redirect, please <a href="http://docs.quay.io/getting-started.html"> click here</a>.
</div>
@@ -4,11 +4,7 @@
<a href="{{ '/repository/' + repo.namespace + '/' + repo.name }}" class="back"><i class="fa fa-chevron-left"></i></a>
<h3>
  <i class="fa fa-archive fa-lg" style="color: #aaa; margin-right: 10px;"></i>
  <span style="color: #aaa;"> {{repo.namespace}}</span>
  <span style="color: #ccc">/</span>
  <span style="color: #666;">{{repo.name}}</span>
  <span style="color: #ccc">/</span>
  <span>{{image.value.id.substr(0, 12)}}</span>
  <span class="repo-breadcrumb" repo="repo" image="image.value"></span>
</h3>
</div>
@@ -4,7 +4,8 @@
<div class="header row">
  <a href="{{ '/repository/' + repo.namespace + '/' + repo.name }}" class="back"><i class="fa fa-chevron-left"></i></a>
  <h3>
    <span class="repo-circle no-background" repo="repo"></span> <span style="color: #aaa;"> {{repo.namespace}}</span> <span style="color: #ccc">/</span> {{repo.name}}
    <span class="repo-circle no-background" repo="repo"></span>
    <span class="repo-breadcrumb" repo="repo"></span>
  </h3>
</div>
@@ -38,7 +38,7 @@
<div class="alert alert-info">
  <h4 ng-show="namespace == user.username">You don't have any repositories yet!</h4>
  <h4 ng-show="namespace != user.username">This organization doesn't have any repositories, or you have not been provided access.</h4>
  <a href="/guide"><b>Click here</b> to learn how to create a repository</a>
  <a href="http://docs.quay.io/getting-started.html"><b>Click here</b> to learn how to create a repository</a>
</div>
</div>
</div>
@@ -185,7 +185,11 @@
       ng-model="org.adminUser" required autofocus>
<input id="adminPassword" name="adminPassword" type="password" class="form-control" placeholder="Admin Password"
       ng-model="org.adminPassword" required>
<span class="description">The username and password for the account that will become administrator of the organization</span>
<span class="description">
  The username and password for the account that will become an administrator of the organization.
  Note that this account <b>must be a separate Quay.io account</b> from the account that you are
  trying to convert, and <b>must already exist</b>.
</span>
</div>

<!-- Plans Table -->
@@ -10,7 +10,7 @@
<div class="header">
  <h3>
    <span class="repo-circle" repo="repo"></span>
    <span style="color: #aaa;"> {{repo.namespace}}</span> <span style="color: #ccc">/</span> {{repo.name}}
    <span class="repo-breadcrumb" repo="repo"></span>
    <span class="settings-cog" ng-show="repo.can_admin" title="Repository Settings" bs-tooltip="tooltip.title" data-placement="bottom">
      <a href="{{ '/repository/' + repo.namespace + '/' + repo.name + '/admin' }}">
        <i class="fa fa-cog fa-lg"></i>
@@ -17,10 +17,6 @@
<loc>https://quay.io/repository/</loc>
<changefreq>always</changefreq>
</url>
<url>
  <loc>https://quay.io/guide/</loc>
  <changefreq>weekly</changefreq>
</url>
<url>
  <loc>https://quay.io/tos</loc>
  <changefreq>monthly</changefreq>
378
test/specs.py
@@ -103,320 +103,328 @@ class TestSpec(object):

def build_specs():
  return [
    TestSpec(url_for('welcome'), 200, 200, 200, 200),
    TestSpec(url_for('api.welcome'), 200, 200, 200, 200),

    TestSpec(url_for('list_plans'), 200, 200, 200, 200),
    TestSpec(url_for('api.list_plans'), 200, 200, 200, 200),

    TestSpec(url_for('get_logged_in_user'), 200, 200, 200, 200),
    TestSpec(url_for('api.get_logged_in_user'), 200, 200, 200, 200),

    TestSpec(url_for('change_user_details'),
    TestSpec(url_for('api.change_user_details'),
             401, 200, 200, 200).set_method('PUT'),

    TestSpec(url_for('create_new_user'), 201, 201, 201,
    TestSpec(url_for('api.create_new_user'), 201, 201, 201,
             201).set_method('POST').set_data_from_obj(NEW_USER_DETAILS),

    TestSpec(url_for('signin_user'), 200, 200, 200,
    TestSpec(url_for('api.signin_user'), 200, 200, 200,
             200).set_method('POST').set_data_from_obj(SIGNIN_DETAILS),

    TestSpec(url_for('request_recovery_email'), 201, 201, 201,
    TestSpec(url_for('api.request_recovery_email'), 201, 201, 201,
             201).set_method('POST').set_data_from_obj(SEND_RECOVERY_DETAILS),

    TestSpec(url_for('get_matching_users', prefix='dev'), 401, 200, 200, 200),
    TestSpec(url_for('api.get_matching_users', prefix='dev'),
             401, 200, 200, 200),

    TestSpec(url_for('get_matching_entities', prefix='dev'), 401, 200, 200,
    TestSpec(url_for('api.get_matching_entities', prefix='dev'), 401, 200, 200,
             200),

    TestSpec(url_for('get_organization', orgname=ORG), 401, 403, 200, 200),
    TestSpec(url_for('api.get_organization', orgname=ORG), 401, 403, 200, 200),

    TestSpec(url_for('get_organization_private_allowed', orgname=ORG)),
    TestSpec(url_for('api.get_organization_private_allowed', orgname=ORG)),

    TestSpec(url_for('update_organization_team', orgname=ORG,
    TestSpec(url_for('api.update_organization_team', orgname=ORG,
                     teamname=ORG_OWNERS)).set_method('PUT'),
    TestSpec(url_for('update_organization_team', orgname=ORG,
    TestSpec(url_for('api.update_organization_team', orgname=ORG,
                     teamname=ORG_READERS)).set_method('PUT'),

    TestSpec(url_for('delete_organization_team', orgname=ORG,
    TestSpec(url_for('api.delete_organization_team', orgname=ORG,
                     teamname=ORG_OWNERS),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_organization_team', orgname=ORG,
    TestSpec(url_for('api.delete_organization_team', orgname=ORG,
                     teamname=ORG_READERS),
             admin_code=204).set_method('DELETE'),

    TestSpec(url_for('get_organization_team_members', orgname=ORG,
    TestSpec(url_for('api.get_organization_team_members', orgname=ORG,
                     teamname=ORG_OWNERS)),
    TestSpec(url_for('get_organization_team_members', orgname=ORG,
    TestSpec(url_for('api.get_organization_team_members', orgname=ORG,
                     teamname=ORG_READERS), read_code=200),

    TestSpec(url_for('update_organization_team_member', orgname=ORG,
    TestSpec(url_for('api.update_organization_team_member', orgname=ORG,
                     teamname=ORG_OWNERS, membername=ORG_OWNER),
             admin_code=400).set_method('PUT'),
    TestSpec(url_for('update_organization_team_member', orgname=ORG,
    TestSpec(url_for('api.update_organization_team_member', orgname=ORG,
                     teamname=ORG_READERS,
                     membername=ORG_OWNER)).set_method('PUT'),

    TestSpec(url_for('delete_organization_team_member', orgname=ORG,
    TestSpec(url_for('api.delete_organization_team_member', orgname=ORG,
                     teamname=ORG_OWNERS, membername=ORG_OWNER),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_organization_team_member', orgname=ORG,
    TestSpec(url_for('api.delete_organization_team_member', orgname=ORG,
                     teamname=ORG_READERS, membername=ORG_OWNER),
             admin_code=400).set_method('DELETE'),

    (TestSpec(url_for('create_repo'))
    (TestSpec(url_for('api.create_repo'))
     .set_method('POST')
     .set_data_from_obj(NEW_ORG_REPO_DETAILS)),

    TestSpec(url_for('find_repos'), 200, 200, 200, 200),
    TestSpec(url_for('api.find_repos'), 200, 200, 200, 200),

    TestSpec(url_for('list_repos'), 200, 200, 200, 200),
    TestSpec(url_for('api.list_repos'), 200, 200, 200, 200),

    TestSpec(url_for('update_repo', repository=PUBLIC_REPO),
    TestSpec(url_for('api.update_repo', repository=PUBLIC_REPO),
             admin_code=403).set_method('PUT'),
    (TestSpec(url_for('update_repo', repository=ORG_REPO))
    (TestSpec(url_for('api.update_repo', repository=ORG_REPO))
     .set_method('PUT')
     .set_data_from_obj(UPDATE_REPO_DETAILS)),
    (TestSpec(url_for('update_repo', repository=PRIVATE_REPO))
    (TestSpec(url_for('api.update_repo', repository=PRIVATE_REPO))
     .set_method('PUT')
     .set_data_from_obj(UPDATE_REPO_DETAILS)),

    (TestSpec(url_for('change_repo_visibility', repository=PUBLIC_REPO),
    (TestSpec(url_for('api.change_repo_visibility', repository=PUBLIC_REPO),
              admin_code=403).set_method('POST')
     .set_data_from_obj(CHANGE_VISIBILITY_DETAILS)),
    (TestSpec(url_for('change_repo_visibility', repository=ORG_REPO))
    (TestSpec(url_for('api.change_repo_visibility', repository=ORG_REPO))
     .set_method('POST').set_data_from_obj(CHANGE_VISIBILITY_DETAILS)),
    (TestSpec(url_for('change_repo_visibility', repository=PRIVATE_REPO))
    (TestSpec(url_for('api.change_repo_visibility', repository=PRIVATE_REPO))
     .set_method('POST').set_data_from_obj(CHANGE_VISIBILITY_DETAILS)),

    TestSpec(url_for('delete_repository', repository=PUBLIC_REPO),
    TestSpec(url_for('api.delete_repository', repository=PUBLIC_REPO),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_repository', repository=ORG_REPO),
    TestSpec(url_for('api.delete_repository', repository=ORG_REPO),
             admin_code=204).set_method('DELETE'),
    TestSpec(url_for('delete_repository', repository=PRIVATE_REPO),
    TestSpec(url_for('api.delete_repository', repository=PRIVATE_REPO),
             admin_code=204).set_method('DELETE'),

    TestSpec(url_for('get_repo', repository=PUBLIC_REPO),
    TestSpec(url_for('api.get_repo', repository=PUBLIC_REPO),
             200, 200, 200, 200),
    TestSpec(url_for('get_repo', repository=ORG_REPO),
    TestSpec(url_for('api.get_repo', repository=ORG_REPO),
             403, 403, 200, 200),
    TestSpec(url_for('get_repo', repository=PRIVATE_REPO),
    TestSpec(url_for('api.get_repo', repository=PRIVATE_REPO),
             403, 403, 200, 200),

    TestSpec(url_for('get_repo_builds', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('get_repo_builds', repository=ORG_REPO)),
    TestSpec(url_for('get_repo_builds', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.get_repo_builds', repository=PUBLIC_REPO),
             200, 200, 200, 200),
    TestSpec(url_for('api.get_repo_builds', repository=ORG_REPO),
             403, 403, 200, 200),
    TestSpec(url_for('api.get_repo_builds', repository=PRIVATE_REPO),
             403, 403, 200, 200),

    TestSpec(url_for('get_filedrop_url'), 401, 200, 200,
    TestSpec(url_for('api.get_filedrop_url'), 401, 200, 200,
             200).set_method('POST').set_data_from_obj(FILE_DROP_DETAILS),

    (TestSpec(url_for('request_repo_build', repository=PUBLIC_REPO),
    (TestSpec(url_for('api.request_repo_build', repository=PUBLIC_REPO),
              admin_code=403).set_method('POST')
     .set_data_from_obj(CREATE_BUILD_DETAILS)),
    (TestSpec(url_for('request_repo_build', repository=ORG_REPO),
    (TestSpec(url_for('api.request_repo_build', repository=ORG_REPO),
              admin_code=201).set_method('POST')
     .set_data_from_obj(CREATE_BUILD_DETAILS)),
    (TestSpec(url_for('request_repo_build', repository=PRIVATE_REPO),
    (TestSpec(url_for('api.request_repo_build', repository=PRIVATE_REPO),
              admin_code=201).set_method('POST')
     .set_data_from_obj(CREATE_BUILD_DETAILS)),

    TestSpec(url_for('create_webhook', repository=PUBLIC_REPO),
    TestSpec(url_for('api.create_webhook', repository=PUBLIC_REPO),
             admin_code=403).set_method('POST'),
    TestSpec(url_for('create_webhook',
    TestSpec(url_for('api.create_webhook',
                     repository=ORG_REPO)).set_method('POST'),
    TestSpec(url_for('create_webhook',
    TestSpec(url_for('api.create_webhook',
                     repository=PRIVATE_REPO)).set_method('POST'),

    TestSpec(url_for('get_webhook', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_webhook', repository=PUBLIC_REPO,
                     public_id=FAKE_WEBHOOK), admin_code=403),
    TestSpec(url_for('get_webhook', repository=ORG_REPO,
    TestSpec(url_for('api.get_webhook', repository=ORG_REPO,
                     public_id=FAKE_WEBHOOK), admin_code=400),
    TestSpec(url_for('get_webhook', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_webhook', repository=PRIVATE_REPO,
                     public_id=FAKE_WEBHOOK), admin_code=400),

    TestSpec(url_for('list_webhooks', repository=PUBLIC_REPO), admin_code=403),
    TestSpec(url_for('list_webhooks', repository=ORG_REPO)),
    TestSpec(url_for('list_webhooks', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.list_webhooks', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('api.list_webhooks', repository=ORG_REPO)),
    TestSpec(url_for('api.list_webhooks', repository=PRIVATE_REPO)),

    TestSpec(url_for('delete_webhook', repository=PUBLIC_REPO,
    TestSpec(url_for('api.delete_webhook', repository=PUBLIC_REPO,
                     public_id=FAKE_WEBHOOK),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_webhook', repository=ORG_REPO,
    TestSpec(url_for('api.delete_webhook', repository=ORG_REPO,
                     public_id=FAKE_WEBHOOK),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_webhook', repository=PRIVATE_REPO,
    TestSpec(url_for('api.delete_webhook', repository=PRIVATE_REPO,
                     public_id=FAKE_WEBHOOK),
             admin_code=400).set_method('DELETE'),

    TestSpec(url_for('list_repository_images', repository=PUBLIC_REPO),
    TestSpec(url_for('api.list_repository_images', repository=PUBLIC_REPO),
             200, 200, 200, 200),
    TestSpec(url_for('list_repository_images', repository=ORG_REPO),
    TestSpec(url_for('api.list_repository_images', repository=ORG_REPO),
             403, 403, 200, 200),
    TestSpec(url_for('list_repository_images', repository=PRIVATE_REPO),
    TestSpec(url_for('api.list_repository_images', repository=PRIVATE_REPO),
             403, 403, 200, 200),

    TestSpec(url_for('get_image', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_image', repository=PUBLIC_REPO,
                     image_id=FAKE_IMAGE_ID), 404, 404, 404, 404),
    TestSpec(url_for('get_image', repository=ORG_REPO,
    TestSpec(url_for('api.get_image', repository=ORG_REPO,
                     image_id=FAKE_IMAGE_ID), 403, 403, 404, 404),
    TestSpec(url_for('get_image', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_image', repository=PRIVATE_REPO,
                     image_id=FAKE_IMAGE_ID), 403, 403, 404, 404),

    TestSpec(url_for('get_image_changes', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_image_changes', repository=PUBLIC_REPO,
                     image_id=FAKE_IMAGE_ID), 404, 404, 404, 404),
    TestSpec(url_for('get_image_changes', repository=ORG_REPO,
    TestSpec(url_for('api.get_image_changes', repository=ORG_REPO,
                     image_id=FAKE_IMAGE_ID), 403, 403, 404, 404),
    TestSpec(url_for('get_image_changes', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_image_changes', repository=PRIVATE_REPO,
                     image_id=FAKE_IMAGE_ID), 403, 403, 404, 404),

    TestSpec(url_for('list_tag_images', repository=PUBLIC_REPO,
    TestSpec(url_for('api.list_tag_images', repository=PUBLIC_REPO,
                     tag=FAKE_TAG_NAME), 404, 404, 404, 404),
    TestSpec(url_for('list_tag_images', repository=ORG_REPO,
    TestSpec(url_for('api.list_tag_images', repository=ORG_REPO,
                     tag=FAKE_TAG_NAME), 403, 403, 404, 404),
    TestSpec(url_for('list_tag_images', repository=PRIVATE_REPO,
    TestSpec(url_for('api.list_tag_images', repository=PRIVATE_REPO,
                     tag=FAKE_TAG_NAME), 403, 403, 404, 404),

    TestSpec(url_for('list_repo_team_permissions', repository=PUBLIC_REPO),
    TestSpec(url_for('api.list_repo_team_permissions', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('list_repo_team_permissions', repository=ORG_REPO)),
    TestSpec(url_for('list_repo_team_permissions', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.list_repo_team_permissions', repository=ORG_REPO)),
    TestSpec(url_for('api.list_repo_team_permissions',
                     repository=PRIVATE_REPO)),

    TestSpec(url_for('list_repo_user_permissions', repository=PUBLIC_REPO),
    TestSpec(url_for('api.list_repo_user_permissions', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('list_repo_user_permissions', repository=ORG_REPO)),
    TestSpec(url_for('list_repo_user_permissions', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.list_repo_user_permissions', repository=ORG_REPO)),
    TestSpec(url_for('api.list_repo_user_permissions',
                     repository=PRIVATE_REPO)),

    TestSpec(url_for('get_user_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_user_permissions', repository=PUBLIC_REPO,
                     username=FAKE_USERNAME), admin_code=403),
    TestSpec(url_for('get_user_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.get_user_permissions', repository=ORG_REPO,
                     username=FAKE_USERNAME), admin_code=400),
    TestSpec(url_for('get_user_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_user_permissions', repository=PRIVATE_REPO,
                     username=FAKE_USERNAME), admin_code=400),

    TestSpec(url_for('get_team_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=PUBLIC_REPO,
                     teamname=ORG_OWNERS), admin_code=403),
    TestSpec(url_for('get_team_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=PUBLIC_REPO,
                     teamname=ORG_READERS), admin_code=403),
    TestSpec(url_for('get_team_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=ORG_REPO,
                     teamname=ORG_OWNERS), admin_code=400),
    TestSpec(url_for('get_team_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=ORG_REPO,
                     teamname=ORG_READERS)),
    TestSpec(url_for('get_team_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=PRIVATE_REPO,
                     teamname=ORG_OWNERS), admin_code=400),
    TestSpec(url_for('get_team_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.get_team_permissions', repository=PRIVATE_REPO,
                     teamname=ORG_READERS), admin_code=400),

    TestSpec(url_for('change_user_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.change_user_permissions', repository=PUBLIC_REPO,
                     username=FAKE_USERNAME),
             admin_code=403).set_method('PUT'),
    TestSpec(url_for('change_user_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.change_user_permissions', repository=ORG_REPO,
                     username=FAKE_USERNAME),
             admin_code=400).set_method('PUT'),
    TestSpec(url_for('change_user_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.change_user_permissions', repository=PRIVATE_REPO,
                     username=FAKE_USERNAME),
             admin_code=400).set_method('PUT'),

    (TestSpec(url_for('change_team_permissions', repository=PUBLIC_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=PUBLIC_REPO,
                      teamname=ORG_OWNERS), admin_code=403)
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),
    (TestSpec(url_for('change_team_permissions', repository=PUBLIC_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=PUBLIC_REPO,
                      teamname=ORG_READERS), admin_code=403)
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),
    (TestSpec(url_for('change_team_permissions', repository=ORG_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=ORG_REPO,
                      teamname=ORG_OWNERS))
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),
    (TestSpec(url_for('change_team_permissions', repository=ORG_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=ORG_REPO,
                      teamname=ORG_READERS))
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),
    (TestSpec(url_for('change_team_permissions', repository=PRIVATE_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=PRIVATE_REPO,
                      teamname=ORG_OWNERS), admin_code=400)
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),
    (TestSpec(url_for('change_team_permissions', repository=PRIVATE_REPO,
    (TestSpec(url_for('api.change_team_permissions', repository=PRIVATE_REPO,
                      teamname=ORG_READERS), admin_code=400)
     .set_method('PUT')
     .set_data_from_obj(CHANGE_PERMISSION_DETAILS)),

    TestSpec(url_for('delete_user_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.delete_user_permissions', repository=PUBLIC_REPO,
                     username=FAKE_USERNAME),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_user_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.delete_user_permissions', repository=ORG_REPO,
                     username=FAKE_USERNAME),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_user_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.delete_user_permissions', repository=PRIVATE_REPO,
                     username=FAKE_USERNAME),
             admin_code=400).set_method('DELETE'),

    TestSpec(url_for('delete_team_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=PUBLIC_REPO,
                     teamname=ORG_OWNERS),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_team_permissions', repository=PUBLIC_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=PUBLIC_REPO,
                     teamname=ORG_READERS),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_team_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=ORG_REPO,
                     teamname=ORG_OWNERS),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_team_permissions', repository=ORG_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=ORG_REPO,
                     teamname=ORG_READERS),
             admin_code=204).set_method('DELETE'),
    TestSpec(url_for('delete_team_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=PRIVATE_REPO,
                     teamname=ORG_OWNERS),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_team_permissions', repository=PRIVATE_REPO,
    TestSpec(url_for('api.delete_team_permissions', repository=PRIVATE_REPO,
                     teamname=ORG_READERS),
             admin_code=400).set_method('DELETE'),

    TestSpec(url_for('list_repo_tokens', repository=PUBLIC_REPO),
    TestSpec(url_for('api.list_repo_tokens', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('list_repo_tokens', repository=ORG_REPO)),
    TestSpec(url_for('list_repo_tokens', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.list_repo_tokens', repository=ORG_REPO)),
    TestSpec(url_for('api.list_repo_tokens', repository=PRIVATE_REPO)),

    TestSpec(url_for('get_tokens', repository=PUBLIC_REPO, code=FAKE_TOKEN),
             admin_code=403),
    TestSpec(url_for('get_tokens', repository=ORG_REPO, code=FAKE_TOKEN),
             admin_code=400),
    TestSpec(url_for('get_tokens', repository=PRIVATE_REPO, code=FAKE_TOKEN),
    TestSpec(url_for('api.get_tokens', repository=PUBLIC_REPO,
                     code=FAKE_TOKEN), admin_code=403),
    TestSpec(url_for('api.get_tokens', repository=ORG_REPO, code=FAKE_TOKEN),
             admin_code=400),
    TestSpec(url_for('api.get_tokens', repository=PRIVATE_REPO,
                     code=FAKE_TOKEN), admin_code=400),

    TestSpec(url_for('create_token', repository=PUBLIC_REPO),
    TestSpec(url_for('api.create_token', repository=PUBLIC_REPO),
             admin_code=403).set_method('POST'),
    (TestSpec(url_for('create_token', repository=ORG_REPO),
    (TestSpec(url_for('api.create_token', repository=ORG_REPO),
              admin_code=201).set_method('POST')
     .set_data_from_obj(CREATE_TOKEN_DETAILS)),
    (TestSpec(url_for('create_token', repository=PRIVATE_REPO),
    (TestSpec(url_for('api.create_token', repository=PRIVATE_REPO),
              admin_code=201).set_method('POST')
     .set_data_from_obj(CREATE_TOKEN_DETAILS)),

    TestSpec(url_for('change_token', repository=PUBLIC_REPO, code=FAKE_TOKEN),
             admin_code=403).set_method('PUT'),
    TestSpec(url_for('change_token', repository=ORG_REPO, code=FAKE_TOKEN),
    TestSpec(url_for('api.change_token', repository=PUBLIC_REPO,
                     code=FAKE_TOKEN), admin_code=403).set_method('PUT'),
    TestSpec(url_for('api.change_token', repository=ORG_REPO, code=FAKE_TOKEN),
             admin_code=400).set_method('PUT'),
    TestSpec(url_for('change_token', repository=PRIVATE_REPO,
    TestSpec(url_for('api.change_token', repository=PRIVATE_REPO,
                     code=FAKE_TOKEN), admin_code=400).set_method('PUT'),

    TestSpec(url_for('delete_token', repository=PUBLIC_REPO, code=FAKE_TOKEN),
             admin_code=403).set_method('DELETE'),
    TestSpec(url_for('delete_token', repository=ORG_REPO, code=FAKE_TOKEN),
    TestSpec(url_for('api.delete_token', repository=PUBLIC_REPO,
                     code=FAKE_TOKEN), admin_code=403).set_method('DELETE'),
    TestSpec(url_for('api.delete_token', repository=ORG_REPO, code=FAKE_TOKEN),
             admin_code=400).set_method('DELETE'),
    TestSpec(url_for('delete_token', repository=PRIVATE_REPO,
    TestSpec(url_for('api.delete_token', repository=PRIVATE_REPO,
                     code=FAKE_TOKEN), admin_code=400).set_method('DELETE'),

    TestSpec(url_for('update_user_subscription'), 401, 400, 400, 400).set_method('PUT'),
    TestSpec(url_for('api.update_user_subscription'),
             401, 400, 400, 400).set_method('PUT'),

    TestSpec(url_for('update_org_subscription', orgname=ORG),
    TestSpec(url_for('api.update_org_subscription', orgname=ORG),
             401, 403, 403, 400).set_method('PUT'),

    TestSpec(url_for('get_user_subscription'), 401, 200, 200, 200),
    TestSpec(url_for('api.get_user_subscription'), 401, 200, 200, 200),

    TestSpec(url_for('get_org_subscription', orgname=ORG)),
    TestSpec(url_for('api.get_org_subscription', orgname=ORG)),

    TestSpec(url_for('list_repo_logs', repository=PUBLIC_REPO), admin_code=403),
    TestSpec(url_for('list_repo_logs', repository=ORG_REPO)),
    TestSpec(url_for('list_repo_logs', repository=PRIVATE_REPO)),
    TestSpec(url_for('api.list_repo_logs', repository=PUBLIC_REPO),
             admin_code=403),
    TestSpec(url_for('api.list_repo_logs', repository=ORG_REPO)),
    TestSpec(url_for('api.list_repo_logs', repository=PRIVATE_REPO)),

    TestSpec(url_for('list_org_logs', orgname=ORG)),
    TestSpec(url_for('api.list_org_logs', orgname=ORG)),
  ]
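Each TestSpec above pairs one URL with the expected status code at each authentication level, which implies a table-driven runner elsewhere in the test suite. That runner is not shown in this excerpt; a hedged sketch of the loop it implies (the attribute names `url`, `method`, and `data` are assumptions about TestSpec's shape, and `client` is a Flask test client):

```python
def run_spec(client, spec, headers, expected_code):
  # Issue the request described by the spec at one auth level and
  # compare the actual status against the expected one.
  method = getattr(client, spec.method.lower())
  response = method(spec.url, data=spec.data, headers=headers)
  assert response.status_code == expected_code, (
      '%s %s: got %s, expected %s' %
      (spec.method, spec.url, response.status_code, expected_code))
```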
@@ -460,120 +468,132 @@ class IndexTestSpec(object):

def build_index_specs():
  return [
    IndexTestSpec(url_for('get_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
                  PUBLIC_REPO, 200, 200, 200, 200),
    IndexTestSpec(url_for('get_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
                  PRIVATE_REPO),
    IndexTestSpec(url_for('get_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_layer', image_id=FAKE_IMAGE_ID),
                  ORG_REPO),

    IndexTestSpec(url_for('put_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
                  PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
    IndexTestSpec(url_for('put_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
                  PRIVATE_REPO, 403, 403, 403, 404).set_method('PUT'),
    IndexTestSpec(url_for('put_image_layer', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_layer', image_id=FAKE_IMAGE_ID),
                  ORG_REPO, 403, 403, 403, 404).set_method('PUT'),

    IndexTestSpec(url_for('put_image_checksum', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_checksum',
                          image_id=FAKE_IMAGE_ID),
                  PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
    IndexTestSpec(url_for('put_image_checksum', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_checksum',
                          image_id=FAKE_IMAGE_ID),
                  PRIVATE_REPO, 403, 403, 403, 400).set_method('PUT'),
    IndexTestSpec(url_for('put_image_checksum', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.put_image_checksum',
                          image_id=FAKE_IMAGE_ID),
                  ORG_REPO, 403, 403, 403, 400).set_method('PUT'),

    IndexTestSpec(url_for('get_image_json', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
                  PUBLIC_REPO, 404, 404, 404, 404),
    IndexTestSpec(url_for('get_image_json', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
                  PRIVATE_REPO, 403, 403, 404, 404),
    IndexTestSpec(url_for('get_image_json', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_json', image_id=FAKE_IMAGE_ID),
                  ORG_REPO, 403, 403, 404, 404),

    IndexTestSpec(url_for('get_image_ancestry', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_ancestry',
                          image_id=FAKE_IMAGE_ID),
                  PUBLIC_REPO, 404, 404, 404, 404),
    IndexTestSpec(url_for('get_image_ancestry', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_ancestry',
                          image_id=FAKE_IMAGE_ID),
                  PRIVATE_REPO, 403, 403, 404, 404),
    IndexTestSpec(url_for('get_image_ancestry', image_id=FAKE_IMAGE_ID),
    IndexTestSpec(url_for('registry.get_image_ancestry',
                          image_id=FAKE_IMAGE_ID),
                  ORG_REPO, 403, 403, 404, 404),
IndexTestSpec(url_for('put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
PUBLIC_REPO, 403, 403, 403, 403).set_method('PUT'),
|
||||
IndexTestSpec(url_for('put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
PRIVATE_REPO, 403, 403, 403, 400).set_method('PUT'),
|
||||
IndexTestSpec(url_for('put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
IndexTestSpec(url_for('registry.put_image_json', image_id=FAKE_IMAGE_ID),
|
||||
ORG_REPO, 403, 403, 403, 400).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('create_user'), NO_REPO, 201, 201, 201,
|
||||
IndexTestSpec(url_for('index.create_user'), NO_REPO, 201, 201, 201,
|
||||
201).set_method('POST').set_data_from_obj(NEW_USER_DETAILS),
|
||||
|
||||
IndexTestSpec(url_for('get_user'), NO_REPO, 404, 200, 200, 200),
|
||||
IndexTestSpec(url_for('index.get_user'), NO_REPO, 404, 200, 200, 200),
|
||||
|
||||
IndexTestSpec(url_for('update_user', username=FAKE_USERNAME),
|
||||
IndexTestSpec(url_for('index.update_user', username=FAKE_USERNAME),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('create_repository', repository=PUBLIC_REPO),
|
||||
IndexTestSpec(url_for('index.create_repository', repository=PUBLIC_REPO),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
|
||||
IndexTestSpec(url_for('create_repository', repository=PRIVATE_REPO),
|
||||
IndexTestSpec(url_for('index.create_repository', repository=PRIVATE_REPO),
|
||||
NO_REPO, 403, 403, 403, 201).set_method('PUT'),
|
||||
IndexTestSpec(url_for('create_repository', repository=ORG_REPO),
|
||||
IndexTestSpec(url_for('index.create_repository', repository=ORG_REPO),
|
||||
NO_REPO, 403, 403, 403, 201).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('update_images', repository=PUBLIC_REPO), NO_REPO,
|
||||
403, 403, 403, 403).set_method('PUT'),
|
||||
IndexTestSpec(url_for('update_images', repository=PRIVATE_REPO), NO_REPO,
|
||||
403, 403, 403, 204).set_method('PUT'),
|
||||
IndexTestSpec(url_for('update_images', repository=ORG_REPO), NO_REPO,
|
||||
IndexTestSpec(url_for('index.update_images', repository=PUBLIC_REPO),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
|
||||
IndexTestSpec(url_for('index.update_images', repository=PRIVATE_REPO),
|
||||
NO_REPO, 403, 403, 403, 204).set_method('PUT'),
|
||||
IndexTestSpec(url_for('index.update_images', repository=ORG_REPO), NO_REPO,
|
||||
403, 403, 403, 204).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('get_repository_images', repository=PUBLIC_REPO),
|
||||
IndexTestSpec(url_for('index.get_repository_images',
|
||||
repository=PUBLIC_REPO),
|
||||
NO_REPO, 200, 200, 200, 200),
|
||||
IndexTestSpec(url_for('get_repository_images', repository=PRIVATE_REPO)),
|
||||
IndexTestSpec(url_for('get_repository_images', repository=ORG_REPO)),
|
||||
IndexTestSpec(url_for('index.get_repository_images',
|
||||
repository=PRIVATE_REPO)),
|
||||
IndexTestSpec(url_for('index.get_repository_images', repository=ORG_REPO)),
|
||||
|
||||
IndexTestSpec(url_for('delete_repository_images', repository=PUBLIC_REPO),
|
||||
IndexTestSpec(url_for('index.delete_repository_images',
|
||||
repository=PUBLIC_REPO),
|
||||
NO_REPO, 501, 501, 501, 501).set_method('DELETE'),
|
||||
|
||||
IndexTestSpec(url_for('put_repository_auth', repository=PUBLIC_REPO),
|
||||
IndexTestSpec(url_for('index.put_repository_auth', repository=PUBLIC_REPO),
|
||||
NO_REPO, 501, 501, 501, 501).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('get_search'), NO_REPO, 501, 501, 501, 501),
|
||||
IndexTestSpec(url_for('index.get_search'), NO_REPO, 501, 501, 501, 501),
|
||||
|
||||
IndexTestSpec(url_for('ping'), NO_REPO, 200, 200, 200, 200),
|
||||
IndexTestSpec(url_for('index.ping'), NO_REPO, 200, 200, 200, 200),
|
||||
|
||||
IndexTestSpec(url_for('get_tags', repository=PUBLIC_REPO), NO_REPO,
|
||||
IndexTestSpec(url_for('tags.get_tags', repository=PUBLIC_REPO), NO_REPO,
|
||||
200, 200, 200, 200),
|
||||
IndexTestSpec(url_for('get_tags', repository=PRIVATE_REPO)),
|
||||
IndexTestSpec(url_for('get_tags', repository=ORG_REPO)),
|
||||
IndexTestSpec(url_for('tags.get_tags', repository=PRIVATE_REPO)),
|
||||
IndexTestSpec(url_for('tags.get_tags', repository=ORG_REPO)),
|
||||
|
||||
IndexTestSpec(url_for('get_tag', repository=PUBLIC_REPO,
|
||||
IndexTestSpec(url_for('tags.get_tag', repository=PUBLIC_REPO,
|
||||
tag=FAKE_TAG_NAME), NO_REPO, 400, 400, 400, 400),
|
||||
IndexTestSpec(url_for('get_tag', repository=PRIVATE_REPO,
|
||||
IndexTestSpec(url_for('tags.get_tag', repository=PRIVATE_REPO,
|
||||
tag=FAKE_TAG_NAME), NO_REPO, 403, 403, 400, 400),
|
||||
IndexTestSpec(url_for('get_tag', repository=ORG_REPO,
|
||||
IndexTestSpec(url_for('tags.get_tag', repository=ORG_REPO,
|
||||
tag=FAKE_TAG_NAME), NO_REPO, 403, 403, 400, 400),
|
||||
|
||||
IndexTestSpec(url_for('put_tag', repository=PUBLIC_REPO,
|
||||
IndexTestSpec(url_for('tags.put_tag', repository=PUBLIC_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('PUT'),
|
||||
IndexTestSpec(url_for('put_tag', repository=PRIVATE_REPO,
|
||||
IndexTestSpec(url_for('tags.put_tag', repository=PRIVATE_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 400).set_method('PUT'),
|
||||
IndexTestSpec(url_for('put_tag', repository=ORG_REPO, tag=FAKE_TAG_NAME),
|
||||
IndexTestSpec(url_for('tags.put_tag', repository=ORG_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 400).set_method('PUT'),
|
||||
|
||||
IndexTestSpec(url_for('delete_tag', repository=PUBLIC_REPO,
|
||||
IndexTestSpec(url_for('tags.delete_tag', repository=PUBLIC_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('DELETE'),
|
||||
IndexTestSpec(url_for('delete_tag', repository=PRIVATE_REPO,
|
||||
IndexTestSpec(url_for('tags.delete_tag', repository=PRIVATE_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 400).set_method('DELETE'),
|
||||
IndexTestSpec(url_for('delete_tag', repository=ORG_REPO,
|
||||
IndexTestSpec(url_for('tags.delete_tag', repository=ORG_REPO,
|
||||
tag=FAKE_TAG_NAME),
|
||||
NO_REPO, 403, 403, 403, 400).set_method('DELETE'),
|
||||
|
||||
IndexTestSpec(url_for('delete_repository_tags', repository=PUBLIC_REPO),
|
||||
IndexTestSpec(url_for('tags.delete_repository_tags',
|
||||
repository=PUBLIC_REPO),
|
||||
NO_REPO, 403, 403, 403, 403).set_method('DELETE'),
|
||||
IndexTestSpec(url_for('delete_repository_tags', repository=PRIVATE_REPO),
|
||||
IndexTestSpec(url_for('tags.delete_repository_tags',
|
||||
repository=PRIVATE_REPO),
|
||||
NO_REPO, 403, 403, 403, 204).set_method('DELETE'),
|
||||
IndexTestSpec(url_for('delete_repository_tags', repository=ORG_REPO),
|
||||
IndexTestSpec(url_for('tags.delete_repository_tags', repository=ORG_REPO),
|
||||
NO_REPO, 403, 403, 403, 204).set_method('DELETE'),
|
||||
]
|
||||
|
|
|
@@ -1,13 +1,14 @@
 import unittest
 import json

+import endpoints.api

 from endpoints.api import api
 from app import app
-from initdb import wipe_database, initialize_database, populate_database
+from initdb import setup_database_for_testing, finished_database_for_testing
 from specs import build_specs


 app.register_blueprint(api, url_prefix='/api')


 NO_ACCESS_USER = 'freshuser'
 READ_ACCESS_USER = 'reader'
 ADMIN_ACCESS_USER = 'devtable'
@@ -15,9 +16,10 @@ ADMIN_ACCESS_USER = 'devtable'

 class ApiTestCase(unittest.TestCase):
   def setUp(self):
-    wipe_database()
-    initialize_database()
-    populate_database()
+    setup_database_for_testing(self)
+
+  def tearDown(self):
+    finished_database_for_testing(self)


 class _SpecTestBuilder(type):
@@ -27,8 +29,10 @@ class _SpecTestBuilder(type):
     with app.test_client() as c:
       if auth_username:
         # Temporarily remove the teardown functions
-        teardown_funcs = app.teardown_request_funcs[None]
-        app.teardown_request_funcs[None] = []
+        teardown_funcs = []
+        if None in app.teardown_request_funcs:
+          teardown_funcs = app.teardown_request_funcs[None]
+          app.teardown_request_funcs[None] = []

         with c.session_transaction() as sess:
           sess['user_id'] = auth_username
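The guard added here avoids a `KeyError` when no teardown handler has been registered yet: `app.teardown_request_funcs` only gains its `None` key once the first app-level handler is attached. A standalone sketch of the detach pattern; the restore step is assumed, since only the removal is visible in this hunk:

```
# Sketch of temporarily detaching Flask teardown handlers while priming a
# test session. Restoring them afterwards is an assumption; this hunk only
# shows the removal.
from flask import Flask

app = Flask(__name__)
app.secret_key = 'test-secret'  # sessions require a secret key

@app.teardown_request
def close_db(exc):
  pass  # stand-in for the real database-closing handler

with app.test_client() as c:
  teardown_funcs = []
  if None in app.teardown_request_funcs:
    teardown_funcs = app.teardown_request_funcs[None]
    app.teardown_request_funcs[None] = []
  try:
    with c.session_transaction() as sess:
      sess['user_id'] = 'devtable'
  finally:
    # Put the handlers back so later requests still close the database.
    app.teardown_request_funcs[None] = teardown_funcs
```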
@@ -1,13 +1,17 @@
 import unittest

+import endpoints.registry
+import endpoints.index
+import endpoints.tags
+
 from app import app
 from util.names import parse_namespace_repository
-from initdb import wipe_database, initialize_database, populate_database
+from initdb import setup_database_for_testing, finished_database_for_testing
 from specs import build_index_specs
 from endpoints.registry import registry
 from endpoints.index import index
 from endpoints.tags import tags


 app.register_blueprint(index, url_prefix='/v1')
 app.register_blueprint(tags, url_prefix='/v1')
 app.register_blueprint(registry, url_prefix='/v1')


 NO_ACCESS_USER = 'freshuser'
@@ -16,10 +20,11 @@ ADMIN_ACCESS_USER = 'devtable'


 class EndpointTestCase(unittest.TestCase):
-  def setUp(self):
-    wipe_database()
-    initialize_database()
-    populate_database()
+  def setUp(self):
+    setup_database_for_testing(self)
+
+  def tearDown(self):
+    finished_database_for_testing(self)


 class _SpecTestBuilder(type):
@@ -29,8 +34,10 @@ class _SpecTestBuilder(type):
     with app.test_client() as c:
       if session_var_list:
         # Temporarily remove the teardown functions
-        teardown_funcs = app.teardown_request_funcs[None]
-        app.teardown_request_funcs[None] = []
+        teardown_funcs = []
+        if None in app.teardown_request_funcs:
+          teardown_funcs = app.teardown_request_funcs[None]
+          app.teardown_request_funcs[None] = []

         with c.session_transaction() as sess:
           for sess_key, sess_val in session_var_list:
tools/sendconfirmemail.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from app import stripe
from app import app

from util.email import send_confirmation_email

from data import model

import argparse

from flask import Flask, current_app
from flask_mail import Mail

def sendConfirmation(username):
  user = model.get_user(username)
  if not user:
    print 'No user found'
    return

  with app.app_context():
    code = model.create_confirm_email_code(user)
    send_confirmation_email(user.username, user.email, code.code)
    print 'Email sent to %s' % (user.email)

parser = argparse.ArgumentParser(description='Sends a confirmation email')
parser.add_argument('username', help='The username')
args = parser.parse_args()
sendConfirmation(args.username)
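The `with app.app_context():` wrapper is what lets this one-off script use Flask-Mail outside a request: extensions resolve their configuration through `current_app`, which only exists inside an application (or request) context. A standalone sketch of the same pattern; the config key is hypothetical:

```
# Minimal sketch: current_app only resolves inside a pushed app context,
# which is why the script above wraps the mail call in app.app_context().
# 'GREETING' is an illustrative config key, not one Quay uses.
from flask import Flask, current_app

app = Flask(__name__)
app.config['GREETING'] = 'hello'

def needs_app_context():
  return current_app.config['GREETING']

with app.app_context():
  print(needs_app_context())  # -> hello
```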
@@ -12,7 +12,11 @@ ALLOWED_TYPES = {tarfile.REGTYPE, tarfile.AREGTYPE}

 def files_and_dirs_from_tar(source_stream, removed_prefix_collector):
-  tar_stream = tarfile.open(mode='r|*', fileobj=source_stream)
+  try:
+    tar_stream = tarfile.open(mode='r|*', fileobj=source_stream)
+  except tarfile.ReadError:
+    # Empty tar file
+    return

   for tar_info in tar_stream:
     absolute = os.path.relpath(tar_info.name.decode('utf-8'), './')
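The new guard works because `tarfile.open` in read mode eagerly reads the first member, so an empty stream raises `tarfile.ReadError` at open time rather than during iteration. A quick standalone check of that behavior:

```
# Standalone check of the behavior the new try/except handles: opening an
# empty stream in streaming mode raises tarfile.ReadError immediately.
import io
import tarfile

try:
  tarfile.open(mode='r|*', fileobj=io.BytesIO(b''))
except tarfile.ReadError as ex:
  print('empty stream rejected: %s' % ex)
```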
util/glogger.py (new file, 23 lines)
@@ -0,0 +1,23 @@
import logging
import logstash_formatter
import gunicorn.glogging

from gunicorn import util

class LogstashLogger(gunicorn.glogging.Logger):
  def _set_handler(self, log, output, fmt):
    # remove previous gunicorn log handler
    h = self._get_gunicorn_handler(log)
    if h:
      log.handlers.remove(h)

    if output is not None:
      if output == "-":
        h = logging.StreamHandler()
      else:
        util.check_is_writeable(output)
        h = logging.FileHandler(output)

      h.setFormatter(logstash_formatter.LogstashFormatter())
      h._gunicorn = True
      log.addHandler(h)
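gunicorn picks up a custom logger through its `logger_class` setting, which accepts a dotted import path. Presumably this class is wired in via the gunicorn config file mentioned in the run instructions; the sketch below is a hypothetical config module, since the actual `conf/gunicorn_config.py` contents are not part of this diff:

```
# Hypothetical gunicorn config sketch (gunicorn config files are Python):
#   gunicorn -c conf/gunicorn_config.py application:application
# The bind address and log path here are illustrative.
bind = '0.0.0.0:8080'
logger_class = 'util.glogger.LogstashLogger'
accesslog = '/mnt/logs/access.log'
```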
util/http.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import logging

from app import mixpanel
from flask import request, abort as flask_abort, jsonify
from auth.auth import get_authenticated_user, get_validated_token

logger = logging.getLogger(__name__)


DEFAULT_MESSAGE = {}
DEFAULT_MESSAGE[400] = 'Invalid Request'
DEFAULT_MESSAGE[401] = 'Unauthorized'
DEFAULT_MESSAGE[403] = 'Permission Denied'
DEFAULT_MESSAGE[404] = 'Not Found'
DEFAULT_MESSAGE[409] = 'Conflict'
DEFAULT_MESSAGE[501] = 'Not Implemented'

def abort(status_code, message=None, issue=None, **kwargs):
  message = (str(message) % kwargs if message else
             DEFAULT_MESSAGE.get(status_code, ''))

  params = dict(request.view_args)
  params.update(kwargs)

  params['url'] = request.url
  params['status_code'] = status_code
  params['message'] = message

  # Add the user information.
  auth_user = get_authenticated_user()
  auth_token = get_validated_token()
  if auth_user:
    mixpanel.track(auth_user.username, 'http_error', params)
    message = '%s (user: %s)' % (message, auth_user.username)
  elif auth_token:
    mixpanel.track(auth_token.code, 'http_error', params)
    message = '%s (token: %s)' % (message,
                                  auth_token.friendly_name or auth_token.code)

  # Log the abort.
  logger.error('Error %s: %s; Arguments: %s' % (status_code, message, params))

  # Calculate the issue URL (if the issue ID was supplied).
  issue_url = None
  if issue:
    issue_url = 'http://docs.quay.io/issues/%s.html' % (issue)

  # Create the final response data and message.
  data = {}
  data['error'] = message

  if issue_url:
    data['info_url'] = issue_url

  resp = jsonify(data)
  resp.status_code = status_code

  # Report the abort to the user.
  flask_abort(resp)
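This helper %-formats the message with the keyword arguments, tracks the failure in Mixpanel, attaches an `info_url` when an issue id is supplied, and raises via `flask.abort` with a JSON body. A hedged usage sketch; the app, route, data, and issue id below are made up for illustration, and `util.http` must be importable (it pulls in the `app` module):

```
# Hedged usage sketch for the abort() helper above; route and issue id are
# illustrative, not real Quay endpoints.
from flask import Flask
from util.http import abort

app = Flask(__name__)

KNOWN_REPOS = {'devtable/simple'}  # hypothetical data

@app.route('/example/<path:repo>')
def example(repo):
  if repo not in KNOWN_REPOS:
    # Message is %-formatted with the kwargs; 'issue' adds an info_url.
    abort(404, 'Repository %(repo)s not found', issue='repo-not-found',
          repo=repo)
  return 'ok'
```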
@@ -1,20 +1,19 @@
 import logging
-import json
 import daemon
 import time
 import argparse
-import digitalocean
-import requests
 import os
+import requests
+import re
+import json
+import shutil

-from apscheduler.scheduler import Scheduler
-from multiprocessing.pool import ThreadPool
-from base64 import b64encode
-from requests.exceptions import ConnectionError
+from docker import Client, APIError
+from tempfile import TemporaryFile, mkdtemp
+from zipfile import ZipFile

 from data.queue import dockerfile_build_queue
 from data import model
 from data.database import db as db_connection
 from workers.worker import Worker
 from app import app
@@ -26,234 +25,284 @@ formatter = logging.Formatter(FORMAT)

 logger = logging.getLogger(__name__)

-BUILD_SERVER_CMD = ('docker run -d -p 5002:5002 ' +
-                    '-lxc-conf="lxc.aa_profile=unconfined" ' +
-                    '-privileged -e \'RESOURCE_URL=%s\' -e \'TAG=%s\' ' +
-                    '-e \'TOKEN=%s\' quay.io/quay/buildserver')
-
-
-def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
-  try:
-    return to_call(*args, **kwargs)
-  except Exception as ex:
-    if retries:
-      logger.debug('Retrying command after %ss' % period)
-      time.sleep(period)
-      return retry_command(to_call, args, kwargs, retries-1, period)
-    raise ex
-
-
-def get_status(url):
-  return retry_command(requests.get, [url]).json()['status']
-
-
-def babysit_builder(request):
-  """ Spin up a build node and ask it to build our job. Retryable errors
-      should return False, while fatal errors should return True.
-  """
-  try:
-    logger.debug('Starting work item: %s' % request)
-    repository_build = model.get_repository_build(request['build_id'])
-    logger.debug('Request details: %s' % repository_build)
-
-    # Initialize digital ocean API
-    do_client_id = app.config['DO_CLIENT_ID']
-    do_api_key = app.config['DO_CLIENT_SECRET']
-    manager = digitalocean.Manager(client_id=do_client_id, api_key=do_api_key)
-
-    # check if there is already a DO node for this build, if so clean it up
-    old_id = repository_build.build_node_id
-    if old_id:
-      logger.debug('Cleaning up old DO node: %s' % old_id)
-      old_droplet = digitalocean.Droplet(id=old_id, client_id=do_client_id,
-                                         api_key=do_api_key)
-      retry_command(old_droplet.destroy)
-
-    # Pick the region for the new droplet
-    allowed_regions = app.config['DO_ALLOWED_REGIONS']
-    regions = retry_command(manager.get_all_regions)
-    available_regions = {region.id for region in regions}
-    regions = available_regions.intersection(allowed_regions)
-    if not regions:
-      logger.error('No droplets in our allowed regtions, available: %s' %
-                   available_regions)
-      return False
-
-    # start the DO node
-    name = 'dockerfile-build-%s' % repository_build.id
-    logger.debug('Starting DO node: %s' % name)
-    droplet = digitalocean.Droplet(client_id=do_client_id,
-                                   api_key=do_api_key,
-                                   name=name,
-                                   region_id=regions.pop(),
-                                   image_id=app.config['DO_DOCKER_IMAGE'],
-                                   size_id=66,  # 512MB,
-                                   backup_active=False)
-    retry_command(droplet.create, [],
-                  {'ssh_key_ids': [app.config['DO_SSH_KEY_ID']]})
-    repository_build.build_node_id = droplet.id
-    repository_build.phase = 'starting'
-    repository_build.save()
-
-    logger.debug('Waiting for DO node to be available.')
-
-    startup = retry_command(droplet.get_events)[0]
-    while not startup.percentage or int(startup.percentage) != 100:
-      logger.debug('Droplet startup percentage: %s' % startup.percentage)
-      time.sleep(5)
-      retry_command(startup.load)
-
-    retry_command(droplet.load)
-    logger.debug('Droplet started at ip address: %s' % droplet.ip_address)
-
-    # connect to it with ssh
-    repository_build.phase = 'initializing'
-    repository_build.save()
-
-    # We wait until here to import paramiko because otherwise it doesn't work
-    # under the daemon context.
-    import paramiko
-    ssh_client = paramiko.SSHClient()
-    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-    logger.debug('Connecting to droplet through ssh at ip: %s' %
-                 droplet.ip_address)
-    retry_command(ssh_client.connect, [droplet.ip_address, 22, 'root'],
-                  {'look_for_keys': False, 'timeout': 10.0,
-                   'key_filename': app.config['DO_SSH_PRIVATE_KEY_FILENAME']})
-
-    # Load the node with the pull token
-    token = app.config['BUILD_NODE_PULL_TOKEN']
-    basicauth = b64encode('%s:%s' % ('$token', token))
-    auth_object = {
-      'https://quay.io/v1/': {
-        'auth': basicauth,
-        'email': '',
-      },
-    }
-
-    create_auth_cmd = 'echo \'%s\' > .dockercfg' % json.dumps(auth_object)
-    ssh_client.exec_command(create_auth_cmd)
-
-    # Pull and run the buildserver
-    pull_cmd = 'docker pull quay.io/quay/buildserver'
-    _, stdout, _ = ssh_client.exec_command(pull_cmd)
-    pull_status = stdout.channel.recv_exit_status()
-
-    if pull_status != 0:
-      logger.error('Pull command failed for host: %s' % droplet.ip_address)
-      return False
-    else:
-      logger.debug('Pull status was: %s' % pull_status)
-
-    # Remove the credentials we used to pull so crafty users cant steal them
-    remove_auth_cmd = 'rm .dockercfg'
-    ssh_client.exec_command(remove_auth_cmd)
-
-    # Prepare the signed resource url the build node can fetch the job from
-    user_files = app.config['USERFILES']
+user_files = app.config['USERFILES']
+build_logs = app.config['BUILDLOGS']
+
+
+class StatusWrapper(object):
+  def __init__(self, build_uuid):
+    self._uuid = build_uuid
+    self._status = {
+      'total_commands': None,
+      'current_command': None,
+      'push_completion': 0.0,
+      'image_completion': {},
+    }
+
+    self.__exit__(None, None, None)
+
+  def __enter__(self):
+    return self._status
+
+  def __exit__(self, exc_type, value, traceback):
+    build_logs.set_status(self._uuid, self._status)
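StatusWrapper is a small write-through container: every exit from its `with` block (including the one its own constructor triggers) persists the status dict via `build_logs.set_status`, so callers mutate the dict inside a `with` and the change is written immediately. A standalone sketch of the same pattern with a plain dict standing in for the build-logs store:

```
# Standalone sketch of the write-through pattern StatusWrapper implements:
# __exit__ persists the dict, so callers mutate it inside a with-block.
class WriteThrough(object):
  def __init__(self, store, key):
    self._store, self._key = store, key
    self._status = {'current_command': None}
    self.__exit__(None, None, None)  # persist the initial state

  def __enter__(self):
    return self._status

  def __exit__(self, exc_type, value, traceback):
    self._store[self._key] = dict(self._status)

store = {}
wrapper = WriteThrough(store, 'build-uuid')
with wrapper as status:
  status['current_command'] = 3
print(store['build-uuid'])  # {'current_command': 3}
```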
+class DockerfileBuildContext(object):
+  def __init__(self, build_context_dir, tag_name, push_token, build_uuid):
+    self._build_dir = build_context_dir
+    self._tag_name = tag_name
+    self._push_token = push_token
+    self._build_uuid = build_uuid
+    self._cl = Client(timeout=1200)
+    self._status = StatusWrapper(self._build_uuid)
+
+    dockerfile_path = os.path.join(self._build_dir, "Dockerfile")
+    self._num_steps = DockerfileBuildContext.__count_steps(dockerfile_path)
+
+    logger.debug('Will build and push to tag named: %s' % self._tag_name)
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, value, traceback):
+    self.__cleanup()
+
+    shutil.rmtree(self._build_dir)
+
+  @staticmethod
+  def __count_steps(dockerfile_path):
+    with open(dockerfile_path, 'r') as dockerfileobj:
+      steps = 0
+      for line in dockerfileobj.readlines():
+        stripped = line.strip()
+        if stripped and stripped[0] is not '#':
+          steps += 1
+    return steps
+
+  @staticmethod
+  def __total_completion(statuses, total_images):
+    percentage_with_sizes = float(len(statuses.values()))/total_images
+    sent_bytes = sum([status[u'current'] for status in statuses.values()])
+    total_bytes = sum([status[u'total'] for status in statuses.values()])
+    return float(sent_bytes)/total_bytes*percentage_with_sizes
+
+  def build(self):
+    logger.debug('Starting build.')
+
+    with self._status as status:
+      status['total_commands'] = self._num_steps
+
+    logger.debug('Building to tag names: %s' % self._tag_name)
+    build_status = self._cl.build(path=self._build_dir, tag=self._tag_name,
+                                  stream=True)
+
+    current_step = 0
+    built_image = None
+    for status in build_status:
+      logger.debug('Status: %s', str(status))
+      build_logs.append_log_message(self._build_uuid, str(status))
+      step_increment = re.search(r'Step ([0-9]+) :', status)
+      if step_increment:
+        current_step = int(step_increment.group(1))
+        logger.debug('Step now: %s/%s' % (current_step, self._num_steps))
+        with self._status as status:
+          status['current_command'] = current_step
+        continue
+
+      complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
+      if complete:
+        built_image = complete.group(1)
+        logger.debug('Final image ID is: %s' % built_image)
+        continue
+
+    # Get the image count
+    if not built_image:
+      return
+
+    return built_image
+
+  def push(self, built_image):
+    # Login to the registry
+    host = re.match(r'([a-z0-9.:]+)/.+/.+$', self._tag_name)
+    if not host:
+      raise RuntimeError('Invalid tag name: %s' % self._tag_name)
+
+    for protocol in ['https', 'http']:
+      registry_endpoint = '%s://%s/v1/' % (protocol, host.group(1))
+      logger.debug('Attempting login to registry: %s' % registry_endpoint)
+
+      try:
+        self._cl.login('$token', self._push_token, registry=registry_endpoint)
+        break
+      except APIError:
+        pass  # Probably the wrong protocol
+
+    history = json.loads(self._cl.history(built_image))
+    num_images = len(history)
+    with self._status as status:
+      status['total_images'] = num_images
+
+    logger.debug('Pushing to tag name: %s' % self._tag_name)
+    resp = self._cl.push(self._tag_name, stream=True)
+
+    for status_str in resp:
+      status = json.loads(status_str)
+      logger.debug('Status: %s', status_str)
+      if u'status' in status:
+        status_msg = status[u'status']
+
+        if status_msg == 'Pushing':
+          if u'progressDetail' in status and u'id' in status:
+            image_id = status[u'id']
+            detail = status[u'progressDetail']
+
+            if u'current' in detail and 'total' in detail:
+              with self._status as status:
+                images = status['image_completion']
+
+                images[image_id] = detail
+                status['push_completion'] = \
+                  DockerfileBuildContext.__total_completion(images, num_images)
+
+      elif u'errorDetail' in status:
+        message = 'Error pushing image.'
+        if u'message' in status[u'errorDetail']:
+          message = str(status[u'errorDetail'][u'message'])
+
+        raise RuntimeError(message)
+
+  def __cleanup(self):
+    # First clean up any containers that might be holding the images
+    for running in self._cl.containers(quiet=True):
+      logger.debug('Killing container: %s' % running['Id'])
+      self._cl.kill(running['Id'])
+
+    # Next, remove all of the containers (which should all now be killed)
+    for container in self._cl.containers(all=True, quiet=True):
+      logger.debug('Removing container: %s' % container['Id'])
+      self._cl.remove_container(container['Id'])
+
+    # Iterate all of the images and remove the ones that the public registry
+    # doesn't know about, this should preserve base images.
+    images_to_remove = set()
+    repos = set()
+    for image in self._cl.images():
+      images_to_remove.add(image['Id'])
+      repos.add(image['Repository'])
+
+    for repo in repos:
+      repo_url = 'https://index.docker.io/v1/repositories/%s/images' % repo
+      repo_info = requests.get(repo_url)
+      if repo_info.status_code / 100 == 2:
+        for repo_image in repo_info.json():
+          if repo_image['id'] in images_to_remove:
+            logger.debug('Image was deemed public: %s' % repo_image['id'])
+            images_to_remove.remove(repo_image['id'])
+
+    for to_remove in images_to_remove:
+      logger.debug('Removing private image: %s' % to_remove)
+      try:
+        self._cl.remove_image(to_remove)
+      except APIError:
+        # Sometimes an upstream image removed this one
+        pass
+
+    # Verify that our images were actually removed
+    for image in self._cl.images():
+      if image['Id'] in images_to_remove:
+        raise RuntimeError('Image was not removed: %s' % image['Id'])
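The `__total_completion` estimate scales the byte-level progress of the layers that have reported sizes by the fraction of layers reporting at all, which yields a rough overall push percentage before every layer has started. A worked example of that arithmetic:

```
# Worked example of the __total_completion math above: two of four layers
# have reported progress, and together they are half sent, so the overall
# estimate is (2/4) * (150/300) = 0.25.
statuses = {
  'layer1': {u'current': 100, u'total': 200},
  'layer2': {u'current': 50,  u'total': 100},
}
total_images = 4

fraction_reporting = float(len(statuses)) / total_images  # 0.5
sent = sum(s[u'current'] for s in statuses.values())      # 150
total = sum(s[u'total'] for s in statuses.values())       # 300
print(float(sent) / total * fraction_reporting)           # 0.25
```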
+class DockerfileBuildWorker(Worker):
+  def __init__(self, *vargs, **kwargs):
+    super(DockerfileBuildWorker, self).__init__(*vargs, **kwargs)
+
+    self._mime_processors = {
+      'application/zip': DockerfileBuildWorker.__prepare_zip,
+      'text/plain': DockerfileBuildWorker.__prepare_dockerfile,
+      'application/octet-stream': DockerfileBuildWorker.__prepare_dockerfile,
+    }
+
+  @staticmethod
+  def __prepare_zip(request_file):
+    build_dir = mkdtemp(prefix='docker-build-')
+
+    # Save the zip file to temp somewhere
+    with TemporaryFile() as zip_file:
+      zip_file.write(request_file.content)
+      to_extract = ZipFile(zip_file)
+      to_extract.extractall(build_dir)
+
+    return build_dir
+
+  @staticmethod
+  def __prepare_dockerfile(request_file):
+    build_dir = mkdtemp(prefix='docker-build-')
+    dockerfile_path = os.path.join(build_dir, "Dockerfile")
+    with open(dockerfile_path, 'w') as dockerfile:
+      dockerfile.write(request_file.content)
+
+    return build_dir
+
+  def process_queue_item(self, job_details):
+    repository_build = model.get_repository_build(job_details['namespace'],
+                                                  job_details['repository'],
+                                                  job_details['build_uuid'])
+
+    resource_url = user_files.get_file_url(repository_build.resource_key)
+    tag_name = repository_build.tag
+    access_token = repository_build.access_token.code
+
-    # Start the build server
-    start_cmd = BUILD_SERVER_CMD % (resource_url, repository_build.tag,
-                                    repository_build.access_token.code)
-    logger.debug('Sending build server request with command: %s' % start_cmd)
-    ssh_client.exec_command(start_cmd)
+    start_msg = ('Starting job with resource url: %s tag: %s and token: %s' %
+                 (resource_url, tag_name, access_token))
+    logger.debug(start_msg)
+    build_logs.append_log_message(repository_build.uuid, start_msg)

-    status_endpoint = 'http://%s:5002/build/' % droplet.ip_address
-    # wait for the server to be ready
-    logger.debug('Waiting for buildserver to be ready')
-    retry_command(requests.get, [status_endpoint])
+    docker_resource = requests.get(resource_url)
+    c_type = docker_resource.headers['content-type']

-    # wait for the job to be complete
+    filetype_msg = ('Request to build file of type: %s with tag: %s' %
+                    (c_type, tag_name))
+    logger.info(filetype_msg)
+    build_logs.append_log_message(repository_build.uuid, filetype_msg)
+
+    if c_type not in self._mime_processors:
+      raise RuntimeError('Invalid dockerfile content type: %s' % c_type)
+
+    build_dir = self._mime_processors[c_type](docker_resource)
+    uuid = repository_build.uuid
     repository_build.phase = 'building'
-    repository_build.status_url = status_endpoint
     repository_build.save()

-    logger.debug('Waiting for job to be complete')
-    status = get_status(status_endpoint)
-    while status != 'error' and status != 'complete':
-      logger.debug('Job status is: %s' % status)
-      time.sleep(5)
-      status = get_status(status_endpoint)
-
-    logger.debug('Job complete with status: %s' % status)
-    if status == 'error':
-      error_message = requests.get(status_endpoint).json()['message']
-      logger.warning('Job error: %s' % error_message)
-      repository_build.phase = 'error'
-    else:
-      repository_build.phase = 'complete'
-
-    # clean up the DO node
-    logger.debug('Cleaning up DO node.')
-    retry_command(droplet.destroy)
-
-    repository_build.status_url = None
-    repository_build.build_node_id = None
-    repository_build.save()
+    try:
+      with DockerfileBuildContext(build_dir, tag_name, access_token,
+                                  repository_build.uuid) as build_ctxt:
+        built_image = build_ctxt.build()
+
+        if not built_image:
+          repository_build.phase = 'error'
+          repository_build.save()
+          build_logs.append_log_message(uuid, 'Unable to build dockerfile.')
+          return False
+
+        repository_build.phase = 'pushing'
+        repository_build.save()
+
+        build_ctxt.push(built_image)
+
+        repository_build.phase = 'complete'
+        repository_build.save()
+
+    except Exception as exc:
+      logger.exception('Exception when processing request.')
+      repository_build.phase = 'error'
+      repository_build.save()
+      build_logs.append_log_message(uuid, exc.message)
+      return False

     return True

-  except Exception as outer_ex:
-    # We don't really know what these are, but they are probably retryable
-    logger.exception('Exception processing job: %s' % outer_ex.message)
-    return False
-
-  finally:
-    if not db_connection.is_closed():
-      logger.debug('Closing thread db connection.')
-      db_connection.close()
-
-
-def process_work_items(pool):
-  logger.debug('Getting work item from queue.')
-
-  item = dockerfile_build_queue.get(processing_time=60*60)  # allow 1 hr
-
-  while item:
-    logger.debug('Queue gave us some work: %s' % item.body)
-
-    request = json.loads(item.body)
-
-    def build_callback(item):
-      local_item = item
-      def complete_callback(completed):
-        if completed:
-          logger.debug('Queue item completed successfully, will be removed.')
-          dockerfile_build_queue.complete(local_item)
-        else:
-          # We have a retryable error, add the job back to the queue
-          logger.debug('Queue item incomplete, will be retryed.')
-          dockerfile_build_queue.incomplete(local_item)
-
-      return complete_callback
-
-    logger.debug('Sending work item to thread pool: %s' % pool)
-    pool.apply_async(babysit_builder, [request],
-                     callback=build_callback(item))
-
-    item = dockerfile_build_queue.get()
-
-  logger.debug('No more work.')
-
-  if not db_connection.is_closed():
-    logger.debug('Closing thread db connection.')
-    db_connection.close()
-
-
-def start_worker():
-  pool = ThreadPool(3)
-  logger.debug('Scheduling worker.')
-
-  sched = Scheduler()
-  sched.start()
-
-  sched.add_interval_job(process_work_items, args=[pool], seconds=30)
-
-  while True:
-    time.sleep(60 * 60 * 24)  # sleep one day, basically forever


 desc = 'Worker daemon to monitor dockerfile build'
 parser = argparse.ArgumentParser(description=desc)
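The hand-rolled scheduler/thread-pool loop is replaced by subclassing `workers.worker.Worker`, which owns the queue polling. The base class itself is not part of this diff, so the contract below is inferred from `process_queue_item`: returning True completes the queue item, returning False leaves it to be retried.

```
# Hedged sketch of the inferred Worker contract; workers/worker.py is not
# shown in this diff, so the return-value semantics are an assumption.
from data.queue import dockerfile_build_queue
from workers.worker import Worker

class EchoWorker(Worker):
  def process_queue_item(self, job_details):
    print('processing: %s' % job_details)
    return True  # inferred: True completes the item, False requeues it

# EchoWorker(dockerfile_build_queue).start()
```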
@@ -264,16 +313,17 @@ parser.add_argument('--log', default='dockerfilebuild.log',
 args = parser.parse_args()


+worker = DockerfileBuildWorker(dockerfile_build_queue)
+
 if args.D:
   handler = logging.FileHandler(args.log)
   handler.setFormatter(formatter)
   root_logger.addHandler(handler)
-  with daemon.DaemonContext(files_preserve=[handler.stream],
-                            working_directory=os.getcwd()):
-    start_worker()
+  with daemon.DaemonContext(files_preserve=[handler.stream]):
+    worker.start()

 else:
   handler = logging.StreamHandler()
   handler.setFormatter(formatter)
   root_logger.addHandler(handler)
-  start_worker()
+  worker.start()
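The `files_preserve` argument matters here because `DaemonContext` closes every open file descriptor when it daemonizes; listing the log handler's stream is what keeps logging alive after the fork. A minimal standalone sketch of that pattern (the log path is illustrative):

```
# Minimal sketch of the files_preserve usage above: DaemonContext closes
# open files on daemonization, so the handler's stream must be preserved
# explicitly for logging to keep working. Path is illustrative.
import daemon
import logging

handler = logging.FileHandler('/tmp/example-worker.log')
logging.getLogger().addHandler(handler)

with daemon.DaemonContext(files_preserve=[handler.stream]):
  logging.getLogger().error('still able to log after daemonizing')
```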