Merge pull request #2366 from coreos-inc/alert-spam-fixes

Small fixes for alert spam
josephschorr 2017-02-22 14:18:18 -05:00 committed by GitHub
commit f7a7d30ec2
13 changed files with 87 additions and 25 deletions


@@ -132,9 +132,13 @@ class BuilderServer(object):
     logger.debug('Unregistering component with realm %s and token %s',
                  component.builder_realm, component.expected_token)
 
-    self._realm_map.pop(component.builder_realm)
-    self._current_components.remove(component)
-    self._session_factory.remove(component)
+    self._realm_map.pop(component.builder_realm, None)
+
+    if component in self._current_components:
+      self._current_components.remove(component)
+
+    if component in self._session_factory:
+      self._session_factory.remove(component)
 
   def _job_heartbeat(self, build_job):
     self._queue.extend_processing(build_job.job_item, seconds_from_now=JOB_TIMEOUT_SECONDS,
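For context on the change above: popping with a default and checking membership before removing makes unregistration idempotent, so a second teardown of the same component no longer raises KeyError/ValueError. A minimal standalone sketch with illustrative names, not the real builder classes:

# Illustrative only: 'realm_map' and 'current_components' stand in for the
# BuilderServer attributes touched in the hunk above.
realm_map = {'realm-1': 'component-a'}
current_components = ['component-a']

def unregister(realm, component):
  realm_map.pop(realm, None)              # no KeyError if the realm was already removed
  if component in current_components:
    current_components.remove(component)  # no ValueError if the component is already gone

unregister('realm-1', 'component-a')
unregister('realm-1', 'component-a')      # second call is a harmless no-op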


@@ -2,9 +2,14 @@ import logging
 import os.path
 import base64
 
-from app import app, github_trigger
+from functools import wraps
+from ssl import SSLError
+
+from github import (Github, UnknownObjectException, GithubException,
+                    BadCredentialsException as GitHubBadCredentialsException)
 from jsonschema import validate
 
+from app import app, github_trigger
 from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
                                       TriggerDeactivationException, TriggerStartException,
                                       EmptyRepositoryException, ValidationRequestException,
@@ -13,13 +18,10 @@ from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivation
                                       find_matching_branches)
 from buildtrigger.basehandler import BuildTriggerHandler
+from endpoints.exception import ExternalServiceError
 from util.security.ssh import generate_ssh_keypair
 from util.dict_wrappers import JSONPathDict, SafeDictSetter
 
-from github import (Github, UnknownObjectException, GithubException,
-                    BadCredentialsException as GitHubBadCredentialsException)
-
 
 logger = logging.getLogger(__name__)
 
 
 GITHUB_WEBHOOK_PAYLOAD_SCHEMA = {
@@ -139,6 +141,18 @@ def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user
   return config.dict_value()
 
 
+def _catch_ssl_errors(func):
+  @wraps(func)
+  def wrapper(*args, **kwargs):
+    try:
+      return func(*args, **kwargs)
+    except SSLError as se:
+      msg = 'Request to the GitHub API failed: %s' % se.message
+      logger.exception(msg)
+      raise ExternalServiceError(msg)
+  return wrapper
+
+
 class GithubBuildTrigger(BuildTriggerHandler):
   """
   BuildTrigger for GitHub that uses the archive API and buildpacks.
@@ -169,6 +183,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
 
     return default_msg
 
+  @_catch_ssl_errors
   def activate(self, standard_webhook_url):
     config = self.config
     new_build_source = config['build_source']

@@ -216,6 +231,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
 
     return config, {'private_key': private_key}
 
+  @_catch_ssl_errors
   def deactivate(self):
     config = self.config
     gh_client = self._get_client()

@@ -256,6 +272,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
     self.config = config
     return config
 
+  @_catch_ssl_errors
   def list_build_sources(self):
     gh_client = self._get_client()
     usr = gh_client.get_user()

@@ -306,6 +323,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
     entries.sort(key=lambda e: e['info']['name'])
     return entries
 
+  @_catch_ssl_errors
  def list_build_subdirs(self):
     config = self.config
     gh_client = self._get_client()

@@ -331,6 +349,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
 
       raise RepositoryReadException(message)
 
+  @_catch_ssl_errors
   def load_dockerfile_contents(self):
     config = self.config
     gh_client = self._get_client()

@@ -352,6 +371,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
       message = ghe.data.get('message', 'Unable to read Dockerfile: %s' % source)
       raise RepositoryReadException(message)
 
+  @_catch_ssl_errors
   def list_field_values(self, field_name, limit=None):
     if field_name == 'refs':
       branches = self.list_field_values('branch_name')

@@ -444,6 +464,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
       'commit_info': commit_info
     }
 
+  @_catch_ssl_errors
   def manual_start(self, run_parameters=None):
     config = self.config
     source = config['build_source']

@@ -474,6 +495,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
     metadata = GithubBuildTrigger._build_metadata_for_commit(commit_sha, ref, repo)
     return self.prepare_build(metadata, is_manual=True)
 
+  @_catch_ssl_errors
   def lookup_user(self, username):
     try:
       gh_client = self._get_client()

@@ -485,6 +507,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
     except GithubException:
       return None
 
+  @_catch_ssl_errors
   def handle_trigger_request(self, request):
     # Check the payload to see if we should skip it based on the lack of a head_commit.
     payload = request.get_json()
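All of the public trigger methods above now share one decorator, so a TLS failure while talking to the GitHub API surfaces as the API-level ExternalServiceError rather than an unhandled SSLError. A self-contained sketch of that pattern (the exception class and method below are simplified stand-ins, not the real app code):

import logging
from functools import wraps
from ssl import SSLError

logger = logging.getLogger(__name__)

class ExternalServiceError(Exception):
  """Stand-in for endpoints.exception.ExternalServiceError."""

def catch_ssl_errors(func):
  @wraps(func)
  def wrapper(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except SSLError as se:
      msg = 'Request to the GitHub API failed: %s' % se
      logger.exception(msg)
      raise ExternalServiceError(msg)
  return wrapper

@catch_ssl_errors
def list_build_sources():
  raise SSLError('handshake failure')   # simulate a flaky TLS connection to the GitHub API

try:
  list_build_sources()
except ExternalServiceError as ese:
  print('trigger call failed cleanly: %s' % ese)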


@@ -14,7 +14,7 @@ from buildtrigger.basehandler import BuildTriggerHandler
 from util.security.ssh import generate_ssh_keypair
 from util.dict_wrappers import JSONPathDict, SafeDictSetter
-from endpoints.exception import ExternalServiceTimeout
+from endpoints.exception import ExternalServiceError
 
 import gitlab
 import requests

@@ -78,7 +78,7 @@ def _catch_timeouts(func):
     except requests.exceptions.Timeout:
       msg = 'Request to the GitLab API timed out'
       logger.exception(msg)
-      raise ExternalServiceTimeout(msg)
+      raise ExternalServiceError(msg)
   return wrapper


@@ -305,13 +305,18 @@ get_epoch_timestamp_ms = lambda: int(time.time() * 1000)
 
 def close_db_filter(_):
-  if not db.is_closed():
-    logger.debug('Disconnecting from database.')
-    db.close()
+  try:
+    if not db.is_closed():
+      logger.debug('Disconnecting from database.')
+      db.close()
 
-  if read_slave.obj is not None and not read_slave.is_closed():
-    logger.debug('Disconnecting from read slave.')
-    read_slave.close()
+    if read_slave.obj is not None and not read_slave.is_closed():
+      logger.debug('Disconnecting from read slave.')
+      read_slave.close()
+  except AttributeError:
+    # If the database is closed between the time we check on line 309 and db.close() is called on
+    # 311, then an AttributeError will be raised. Simply eat this exception and continue onward.
+    pass
 
 
 class QuayUserField(ForeignKeyField):
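The swallowed AttributeError is the narrow window the in-code comment describes: the connection passes the is_closed() check but is torn down by another request finalizer before close() runs. A toy reproduction of that shape (not Quay's real database objects):

import io

class Database(object):
  """Stand-in for the peewee database handle; not Quay's real class."""
  def __init__(self):
    self._conn = io.BytesIO()     # pretend connection

  def is_closed(self):
    return self._conn is None

  def close(self):
    self._conn.close()            # AttributeError if _conn was nulled concurrently
    self._conn = None

db = Database()

def close_db_filter(_):
  try:
    if not db.is_closed():
      db.close()
  except AttributeError:
    pass  # the connection vanished between the check and the close; nothing left to do

# Force the race by hand: the check still reports "open", but the connection is gone.
db._conn = None
db.is_closed = lambda: False
close_db_filter(None)             # swallowed instead of becoming an error-level alert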


@@ -96,6 +96,9 @@ class ServiceNameInvalid(DataModelException):
 class TagAlreadyCreatedException(DataModelException):
   pass
 
+class StaleTagException(DataModelException):
+  pass
+
 class TooManyLoginAttemptsException(Exception):
   def __init__(self, message, retry_after):


@@ -54,7 +54,10 @@ def purge_repository(namespace_name, repository_name):
       not need to be checked or responded to.
   """
-  repo = _basequery.get_existing_repository(namespace_name, repository_name)
+  try:
+    repo = _basequery.get_existing_repository(namespace_name, repository_name)
+  except Repository.DoesNotExist:
+    return False
 
   # Delete all tags to allow gc to reclaim storage
   previously_referenced = tag.purge_all_tags(repo)

@@ -74,7 +77,11 @@ def purge_repository(namespace_name, repository_name):
     return False
 
   # Delete the rest of the repository metadata
-  fetched = _basequery.get_existing_repository(namespace_name, repository_name)
+  try:
+    fetched = _basequery.get_existing_repository(namespace_name, repository_name)
+  except Repository.DoesNotExist:
+    return False
+
   fetched.delete_instance(recursive=True, delete_nullable=False)
   return True
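In isolation, the guard turns a repository that disappeared out from under the purge (for example, deleted by a concurrent worker) into a False return rather than an unhandled Repository.DoesNotExist. A runnable sketch using an illustrative peewee model, not the real schema:

from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class Repository(Model):
  name = CharField()

  class Meta:
    database = db

db.connect()
db.create_tables([Repository])

def purge_repository(name):
  try:
    repo = Repository.get(Repository.name == name)
  except Repository.DoesNotExist:
    return False                  # already gone; report failure instead of raising
  repo.delete_instance()
  return True

print(purge_repository('missing/repo'))   # False, and no stack trace in the logs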


@@ -4,7 +4,7 @@ from uuid import uuid4
 from peewee import IntegrityError
 
 from data.model import (image, db_transaction, DataModelException, _basequery,
-                        InvalidManifestException, TagAlreadyCreatedException)
+                        InvalidManifestException, TagAlreadyCreatedException, StaleTagException)
 from data.database import (RepositoryTag, Repository, Image, ImageStorage, Namespace, TagManifest,
                            RepositoryNotification, Label, TagManifestLabel, get_epoch_timestamp,
                            db_for_update)

@@ -92,6 +92,9 @@ def create_or_update_tag(namespace_name, repository_name, tag_name, tag_docker_i
       tag.save()
     except RepositoryTag.DoesNotExist:
       pass
+    except IntegrityError:
+      msg = 'Tag with name %s was stale when we tried to update it; Please retry the push'
+      raise StaleTagException(msg % tag_name)
 
     try:
       image_obj = Image.get(Image.docker_image_id == tag_docker_image_id, Image.repository == repo)
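The new except clause translates a low-level uniqueness violation during a tag update into a domain exception the push path can report back to the client as a retryable error. A minimal sketch of that translation (the exception hierarchy is stubbed here; only IntegrityError comes from peewee):

from peewee import IntegrityError

class DataModelException(Exception):
  pass

class StaleTagException(DataModelException):
  pass

def save_tag(tag_name, save_callable):
  """Sketch: persist a tag row, turning a lost race on the unique index into
  a retryable, user-facing error instead of an unhandled 500."""
  try:
    save_callable()
  except IntegrityError:
    msg = 'Tag with name %s was stale when we tried to update it; Please retry the push'
    raise StaleTagException(msg % tag_name)

def racing_save():
  raise IntegrityError('duplicate key value violates unique constraint')

try:
  save_tag('latest', racing_save)
except StaleTagException as ste:
  print('push should be retried: %s' % ste)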


@@ -257,8 +257,13 @@ def get_organization_team_member_invites(teamid):
 
 def delete_team_email_invite(team, email):
-  found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
+  try:
+    found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
+  except TeamMemberInvite.DoesNotExist:
+    return False
+
   found.delete_instance()
+  return True
 
 
 def delete_team_user_invite(team, user_obj):


@@ -276,9 +276,12 @@ class RepositoryBuildList(RepositoryParamResource):
     if repo is None:
       raise NotFound()
 
-    build_name = (user_files.get_file_checksum(dockerfile_id)
-                  if dockerfile_id
-                  else hashlib.sha224(archive_url).hexdigest()[0:7])
+    try:
+      build_name = (user_files.get_file_checksum(dockerfile_id)
+                    if dockerfile_id
+                    else hashlib.sha224(archive_url).hexdigest()[0:7])
+    except IOError:
+      raise InvalidRequest('File %s could not be found or is invalid' % dockerfile_id)
 
     prepared = PreparedBuild()
     prepared.build_name = build_name
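Same translation idea at the API layer: if the uploaded Dockerfile cannot be read back, the checksum lookup now yields a client-facing InvalidRequest instead of an unhandled IOError. A sketch with the storage layer and exception stubbed (illustrative names only):

import hashlib

class InvalidRequest(Exception):
  """Stand-in for the API framework's 400-level error."""

class FakeUserFiles(object):
  def get_file_checksum(self, dockerfile_id):
    raise IOError('file %s is missing from storage' % dockerfile_id)

user_files = FakeUserFiles()

def compute_build_name(dockerfile_id, archive_url):
  try:
    return (user_files.get_file_checksum(dockerfile_id)
            if dockerfile_id
            else hashlib.sha224(archive_url.encode('utf-8')).hexdigest()[0:7])
  except IOError:
    raise InvalidRequest('File %s could not be found or is invalid' % dockerfile_id)

print(compute_build_name(None, 'https://example.com/archive.tar.gz'))  # short digest of the URL
try:
  compute_build_name('missing-id', None)
except InvalidRequest as ir:
  print('400: %s' % ir)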


@@ -347,7 +347,9 @@ class InviteTeamMember(ApiResource):
       raise NotFound()
 
     # Delete the invite.
-    model.team.delete_team_email_invite(team, email)
+    if not model.team.delete_team_email_invite(team, email):
+      raise NotFound()
+
     log_action('org_delete_team_member_invite', orgname, {
       'email': email,
       'team': teamname,
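Taken together with the data-model change above, a missing invite now flows back as a boolean and becomes a clean 404 at the endpoint, instead of a TeamMemberInvite.DoesNotExist escaping as a 500. A toy end-to-end sketch (the in-memory store and NotFound class are stand-ins):

class NotFound(Exception):
  """Stand-in for the API framework's 404 error."""

class TeamMemberInvite(object):
  class DoesNotExist(Exception):
    pass

  _invites = {}   # (team, email) -> invite; toy in-memory store

  @classmethod
  def get(cls, team, email):
    try:
      return cls._invites[(team, email)]
    except KeyError:
      raise cls.DoesNotExist()

def delete_team_email_invite(team, email):
  try:
    TeamMemberInvite.get(team, email)
  except TeamMemberInvite.DoesNotExist:
    return False                 # nothing to delete; let the caller decide how to report it
  del TeamMemberInvite._invites[(team, email)]
  return True

def delete_invite_endpoint(team, email):
  if not delete_team_email_invite(team, email):
    raise NotFound()             # clean 404 instead of an unhandled DoesNotExist

try:
  delete_invite_endpoint('owners', 'nobody@example.com')
except NotFound:
  print('404: invite not found')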


@@ -77,7 +77,7 @@ class ApiException(Exception):
 
     return rv
 
-class ExternalServiceTimeout(ApiException):
+class ExternalServiceError(ApiException):
   def __init__(self, error_description, payload=None):
     ApiException.__init__(self, ApiErrorType.external_service_timeout, 520, error_description, payload)


@@ -134,6 +134,9 @@ class WebEndpointTestCase(EndpointTestCase):
   def test_repo_view(self):
     self.getResponse('web.repository', path='devtable/simple')
 
+  def test_unicode_repo_view(self):
+    self.getResponse('web.repository', path='%E2%80%8Bcoreos/hyperkube%E2%80%8B')
+
   def test_org_view(self):
     self.getResponse('web.org_view', path='buynlarge')
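The %E2%80%8B in the new test path is the percent-encoded UTF-8 form of U+200B (zero-width space), an invisible character that can sneak into a repository name when it is pasted from a web page; this relates to the parse_namespace_repository change at the end of this diff, which runs such names through the unidecode codec. A small sketch of what that escape actually decodes to (it stops short of the transliteration itself):

try:
  from urllib import unquote          # Python 2, the codebase's runtime
except ImportError:
  from urllib.parse import unquote    # Python 3 fallback for running this sketch

raw = unquote('%E2%80%8Bcoreos/hyperkube%E2%80%8B')
text = raw.decode('utf-8') if isinstance(raw, bytes) else raw
print(repr(text))   # u'\u200bcoreos/hyperkube\u200b' -- looks identical to 'coreos/hyperkube'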


@@ -1,6 +1,8 @@
 import urllib
 import re
 
+import anunidecode # Don't listen to pylint's lies. This import is required for unidecode below.
+
 from uuid import uuid4
 
 REPOSITORY_NAME_REGEX = re.compile(r'^[\.a-zA-Z0-9_-]+$')

@@ -23,6 +25,8 @@ def escape_tag(tag, default='latest'):
 
 def parse_namespace_repository(repository, library_namespace, include_tag=False):
+  repository = repository.encode('unidecode', 'ignore')
+
   parts = repository.rstrip('/').split('/', 1)
   if len(parts) < 2:
     namespace = library_namespace