Refactor the util directory to use subpackages.

Jake Moshenko 2015-08-03 15:49:10 -04:00
parent 974ccaa2e7
commit 18100be481
46 changed files with 36 additions and 39 deletions
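
For orientation, the new layout implied by the import rewrites below groups the util modules into themed subpackages. The sketch that follows is reconstructed only from the paths touched in this diff (modules not mentioned here, such as util.names, util.http and util.validation, stay at the top level); it is not an exhaustive listing of the new tree:

# util/
#   config/     oauth.py, provider.py, configutil.py, superusermanager.py
#   migrate/    __init__.py (new), backfill_user_uuids.py, uncompressedsize.py,
#               migratebitbucketservices.py, migrateslackwebhook.py
#   registry/   aufs.py, changes.py, generatorfile.py, gzipstream.py, gzipwrap.py,
#               queuefile.py, queueprocess.py, streamlayerformat.py,
#               tarlayerformat.py, tarfileappender.py
#   saas/       __init__.py (new), analytics.py, cloudwatch.py, exceptionlog.py,
#               queuemetrics.py
#   security/   aes.py, signing.py, ssh.py
#
# Callers update their imports accordingly, for example:
from util.saas.analytics import Analytics      # was: from util.analytics import Analytics
from util.security.signing import Signer       # was: from util.signing import Signer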

app.py

@@ -21,12 +21,12 @@ from data.buildlogs import BuildLogs
 from data.archivedlogs import LogArchive
 from data.userevent import UserEventsBuilderModule
 from data.queue import WorkQueue
-from util.analytics import Analytics
-from util.exceptionlog import Sentry
+from util.saas.analytics import Analytics
+from util.saas.exceptionlog import Sentry
 from util.names import urn_generator
-from util.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
-from util.signing import Signer
-from util.queuemetrics import QueueMetrics
+from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
+from util.security.signing import Signer
+from util.saas.queuemetrics import QueueMetrics
 from util.config.provider import FileConfigProvider, TestConfigProvider
 from util.config.configutil import generate_secret_key
 from util.config.superusermanager import SuperUserManager


@@ -1,5 +1,5 @@
 from buildman.enums import BuildJobResult
-from util.cloudwatch import get_queue
+from util.saas.cloudwatch import get_queue
 class BuildReporter(object):


@@ -13,7 +13,7 @@ down_revision = '2fb36d4be80d'
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
-from util.backfill_user_uuids import backfill_user_uuids
+from util.migrate.backfill_user_uuids import backfill_user_uuids
 def upgrade(tables):
   backfill_user_uuids()


@@ -12,7 +12,7 @@ down_revision = '3b4d3a4461dc'
 from alembic import op
 import sqlalchemy as sa
-from util.uncompressedsize import backfill_sizes_from_data
+from util.migrate.uncompressedsize import backfill_sizes_from_data
 def upgrade(tables):


@@ -6,7 +6,7 @@ Create Date: 2015-07-21 14:03:44.964200
 """
-from util.migratebitbucketservices import run_bitbucket_migration
+from util.migrate.migratebitbucketservices import run_bitbucket_migration
 # revision identifiers, used by Alembic.
 revision = '437ee6269a9d'


@@ -13,7 +13,7 @@ down_revision = '1c5b738283a5'
 from alembic import op
 import sqlalchemy as sa
-from util.migrateslackwebhook import run_slackwebhook_migration
+from util.migrate.migrateslackwebhook import run_slackwebhook_migration
 def upgrade(tables):


@@ -10,8 +10,7 @@ from data.users.database import DatabaseUsers
 from data.users.externalldap import LDAPUsers
 from data.users.externaljwt import ExternalJWTAuthN
 from data.users.keystone import KeystoneUsers
-from util.aes import AESCipher
+from util.security.aes import AESCipher
 logger = logging.getLogger(__name__)


@@ -17,8 +17,8 @@ from jsonschema import validate
 from data import model
 from app import app, userfiles as user_files, github_trigger, get_app_url
-from util.tarfileappender import TarfileAppender
-from util.ssh import generate_ssh_keypair
+from util.registry.tarfileappender import TarfileAppender
+from util.security.ssh import generate_ssh_keypair
 client = app.config['HTTPCLIENT']


@@ -10,12 +10,12 @@ from app import storage as store, image_diff_queue, app
 from auth.auth import process_auth, extract_namespace_repo_from_session
 from auth.auth_context import get_authenticated_user, get_grant_user_context
 from digest import checksums
-from util import changes
+from util.registry import changes
 from util.http import abort, exact_abort
 from auth.permissions import (ReadRepositoryPermission,
                               ModifyRepositoryPermission)
 from data import model, database
-from util import gzipstream
+from util.registry import gzipstream
 from endpoints.v1 import v1_bp
 from endpoints.decorators import anon_protect
@@ -578,8 +578,7 @@ def process_image_changes(namespace, repository, image_id):
   layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
                                                 removed_files)
-  new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files,
-                                                  removed_files)
+  new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files, removed_files)
   (new_trie, added, changed, removed) = new_metadata
   # Write out the new trie


@@ -12,8 +12,8 @@ from endpoints.trackhelper import track_and_log
 from endpoints.decorators import anon_protect
 from storage import Storage
-from util.queuefile import QueueFile
-from util.queueprocess import QueueProcess
+from util.registry.queuefile import QueueFile
+from util.registry.queueprocess import QueueProcess
 from formats.squashed import SquashedDockerImage
 from formats.aci import ACIImage


@@ -1,5 +1,5 @@
 from app import app
-from util.streamlayerformat import StreamLayerMerger
+from util.registry.streamlayerformat import StreamLayerMerger
 from formats.tarimageformatter import TarImageFormatter
 import json


@@ -1,6 +1,6 @@
 from app import app
-from util.gzipwrap import GZIP_BUFFER_SIZE
-from util.streamlayerformat import StreamLayerMerger
+from util.registry.gzipwrap import GZIP_BUFFER_SIZE
+from util.registry.streamlayerformat import StreamLayerMerger
 from formats.tarimageformatter import TarImageFormatter
 import copy


@@ -1,5 +1,5 @@
 import tarfile
-from util.gzipwrap import GzipWrap
+from util.registry.gzipwrap import GzipWrap
 class TarImageFormatter(object):
   """ Base class for classes which produce a TAR containing image and layer data. """


@@ -1,7 +1,7 @@
 """ Swift storage driver. Based on: github.com/bacongobbler/docker-registry-driver-swift/ """
 from swiftclient.client import Connection, ClientException
 from storage.basestorage import BaseStorage
-from util.generatorfile import GeneratorFile
+from util.registry.generatorfile import GeneratorFile
 from random import SystemRandom
 import string


@@ -2,8 +2,8 @@ import unittest
 import tarfile
 from StringIO import StringIO
-from util.streamlayerformat import AUFS_WHITEOUT
-from util.changes import files_and_dirs_from_tar
+from util.registry.streamlayerformat import AUFS_WHITEOUT
+from util.registry.changes import files_and_dirs_from_tar
 class TestChanges(unittest.TestCase):
   def create_layer(self, **kwargs):


@@ -2,8 +2,8 @@ import unittest
 import tarfile
 from StringIO import StringIO
-from util.streamlayerformat import StreamLayerMerger, AUFS_WHITEOUT
-from util.tarlayerformat import TarLayerReadException
+from util.registry.streamlayerformat import StreamLayerMerger, AUFS_WHITEOUT
+from util.registry.tarlayerformat import TarLayerReadException
 class TestStreamLayerMerger(unittest.TestCase):
   def create_layer(self, **kwargs):


@@ -3,7 +3,7 @@ import unittest
 from itertools import islice
 from util.validation import generate_valid_usernames
-from util.generatorfile import GeneratorFile
+from util.registry.generatorfile import GeneratorFile
 class TestGeneratorFile(unittest.TestCase):
   def sample_generator(self):


@@ -16,7 +16,7 @@ from flask.ext.mail import Mail, Message
 from data.database import validate_database_url, User
 from storage import get_storage_driver
 from auth.auth_context import get_authenticated_user
-from util.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
+from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
 from bitbucket import BitBucket
 from app import app, CONFIG_PROVIDER, get_app_url, OVERRIDE_CONFIG_DIRECTORY

util/migrate/__init__.py (new, empty file)
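
The new zero-byte __init__.py is what makes util/migrate importable as a subpackage under Python 2, which has no namespace packages; a minimal illustration, using a module name taken from this diff:

# With util/migrate/__init__.py present (even though it is empty), the rewritten
# import used by the alembic migrations above resolves:
from util.migrate.backfill_user_uuids import backfill_user_uuids
# Without the __init__.py, Python 2 would fail with an ImportError along the
# lines of "No module named migrate.backfill_user_uuids".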


@@ -5,7 +5,7 @@ from data import model
 from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db, db_random_func
-from util.gzipstream import ZLIB_GZIP_WINDOW
+from util.registry.gzipstream import ZLIB_GZIP_WINDOW
 logger = logging.getLogger(__name__)


@@ -1,7 +1,7 @@
 import marisa_trie
 import os
 import tarfile
-from aufs import is_aufs_metadata, get_deleted_prefix
+from util.registry.aufs import is_aufs_metadata, get_deleted_prefix
 ALLOWED_TYPES = {tarfile.REGTYPE, tarfile.AREGTYPE}
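
Worth noting: this hunk (and the next) also replaces a Python 2 implicit relative import, which only worked while the importing module sat in the same directory as aufs.py, with the absolute path of the module's new home. A minimal sketch of the two forms, assuming the modules now live under util/registry/ as the diff indicates:

# Before the move: implicit relative import, Python 2 only, and it silently
# depends on aufs.py living next to the importing file.
#   from aufs import is_aufs_metadata, get_deleted_prefix
# After the move: absolute import, resolvable from anywhere on sys.path.
from util.registry.aufs import is_aufs_metadata, get_deleted_prefix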


@@ -1,8 +1,7 @@
 import marisa_trie
 import os
 import tarfile
-from aufs import is_aufs_metadata, get_deleted_prefix
-from util.tarlayerformat import TarLayerFormat
+from util.registry.aufs import is_aufs_metadata, get_deleted_prefix
+from util.registry.tarlayerformat import TarLayerFormat
 AUFS_METADATA = u'.wh..wh.'


@@ -2,8 +2,8 @@ import tarfile
 from cStringIO import StringIO
-from util.tarlayerformat import TarLayerFormat
-from util.gzipwrap import GzipWrap
+from util.registry.tarlayerformat import TarLayerFormat
+from util.registry.gzipwrap import GzipWrap
 class TarfileAppender(TarLayerFormat):
   """ Helper class which allows for appending entries to a gzipped-tarfile and doing so

util/saas/__init__.py (new, empty file)


@@ -1,6 +1,6 @@
 import logging
-from util.cloudwatch import get_queue
+from util.saas.cloudwatch import get_queue
 logger = logging.getLogger(__name__)
