Merge pull request #300 from coreos-inc/toomanyutils
Refactor the util directory to use subpackages.
Commit: f772bd0c9e
46 changed files with 36 additions and 39 deletions
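Taken together, the hunks below move the flat util modules into purpose-specific subpackages: util.saas, util.config, util.security, util.registry, and util.migrate. Modules such as util.names, util.http, and util.validation, plus the existing util.config.* modules, stay where they are. As a quick reference, here is the relocation map implied by the import changes in this diff, written as a Python dict; the dict name is purely illustrative, and the list covers only the files shown on this page:

RELOCATED_UTIL_MODULES = {
    # SaaS integrations
    'util.analytics':                'util.saas.analytics',
    'util.exceptionlog':             'util.saas.exceptionlog',
    'util.queuemetrics':             'util.saas.queuemetrics',
    'util.cloudwatch':               'util.saas.cloudwatch',
    # Configuration
    'util.oauth':                    'util.config.oauth',
    # Security primitives
    'util.signing':                  'util.security.signing',
    'util.ssh':                      'util.security.ssh',
    'util.aes':                      'util.security.aes',
    # Registry / image handling
    'util.changes':                  'util.registry.changes',
    'util.gzipstream':               'util.registry.gzipstream',
    'util.gzipwrap':                 'util.registry.gzipwrap',
    'util.streamlayerformat':        'util.registry.streamlayerformat',
    'util.tarlayerformat':           'util.registry.tarlayerformat',
    'util.tarfileappender':          'util.registry.tarfileappender',
    'util.queuefile':                'util.registry.queuefile',
    'util.queueprocess':             'util.registry.queueprocess',
    'util.generatorfile':            'util.registry.generatorfile',
    # Database migrations
    'util.backfill_user_uuids':      'util.migrate.backfill_user_uuids',
    'util.uncompressedsize':         'util.migrate.uncompressedsize',
    'util.migratebitbucketservices': 'util.migrate.migratebitbucketservices',
    'util.migrateslackwebhook':      'util.migrate.migrateslackwebhook',
}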
app.py (10 changes)

@@ -21,12 +21,12 @@ from data.buildlogs import BuildLogs
 from data.archivedlogs import LogArchive
 from data.userevent import UserEventsBuilderModule
 from data.queue import WorkQueue
-from util.analytics import Analytics
+from util.saas.analytics import Analytics
-from util.exceptionlog import Sentry
+from util.saas.exceptionlog import Sentry
 from util.names import urn_generator
-from util.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
+from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
-from util.signing import Signer
+from util.security.signing import Signer
-from util.queuemetrics import QueueMetrics
+from util.saas.queuemetrics import QueueMetrics
 from util.config.provider import FileConfigProvider, TestConfigProvider
 from util.config.configutil import generate_secret_key
 from util.config.superusermanager import SuperUserManager

@@ -1,5 +1,5 @@
 from buildman.enums import BuildJobResult
-from util.cloudwatch import get_queue
+from util.saas.cloudwatch import get_queue
 
 
 class BuildReporter(object):

@@ -13,7 +13,7 @@ down_revision = '2fb36d4be80d'
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
-from util.backfill_user_uuids import backfill_user_uuids
+from util.migrate.backfill_user_uuids import backfill_user_uuids
 
 def upgrade(tables):
   backfill_user_uuids()

@@ -12,7 +12,7 @@ down_revision = '3b4d3a4461dc'
 
 from alembic import op
 import sqlalchemy as sa
-from util.uncompressedsize import backfill_sizes_from_data
+from util.migrate.uncompressedsize import backfill_sizes_from_data
 
 
 def upgrade(tables):

@@ -6,7 +6,7 @@ Create Date: 2015-07-21 14:03:44.964200
 
 """
 
-from util.migratebitbucketservices import run_bitbucket_migration
+from util.migrate.migratebitbucketservices import run_bitbucket_migration
 
 # revision identifiers, used by Alembic.
 revision = '437ee6269a9d'

@@ -13,7 +13,7 @@ down_revision = '1c5b738283a5'
 from alembic import op
 import sqlalchemy as sa
 
-from util.migrateslackwebhook import run_slackwebhook_migration
+from util.migrate.migrateslackwebhook import run_slackwebhook_migration
 
 
 def upgrade(tables):

@@ -10,8 +10,7 @@ from data.users.database import DatabaseUsers
 from data.users.externalldap import LDAPUsers
 from data.users.externaljwt import ExternalJWTAuthN
 from data.users.keystone import KeystoneUsers
-
-from util.aes import AESCipher
+from util.security.aes import AESCipher
 
 logger = logging.getLogger(__name__)
 

@@ -17,8 +17,8 @@ from jsonschema import validate
 from data import model
 
 from app import app, userfiles as user_files, github_trigger, get_app_url
-from util.tarfileappender import TarfileAppender
+from util.registry.tarfileappender import TarfileAppender
-from util.ssh import generate_ssh_keypair
+from util.security.ssh import generate_ssh_keypair
 
 
 client = app.config['HTTPCLIENT']

@@ -10,12 +10,12 @@ from app import storage as store, image_diff_queue, app
 from auth.auth import process_auth, extract_namespace_repo_from_session
 from auth.auth_context import get_authenticated_user, get_grant_user_context
 from digest import checksums
-from util import changes
+from util.registry import changes
 from util.http import abort, exact_abort
 from auth.permissions import (ReadRepositoryPermission,
                               ModifyRepositoryPermission)
 from data import model, database
-from util import gzipstream
+from util.registry import gzipstream
 from endpoints.v1 import v1_bp
 from endpoints.decorators import anon_protect
 

@@ -578,8 +578,7 @@ def process_image_changes(namespace, repository, image_id):
   layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
                                                 removed_files)
 
-  new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files,
-                                                  removed_files)
+  new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files, removed_files)
   (new_trie, added, changed, removed) = new_metadata
 
   # Write out the new trie

@@ -12,8 +12,8 @@ from endpoints.trackhelper import track_and_log
 from endpoints.decorators import anon_protect
 from storage import Storage
 
-from util.queuefile import QueueFile
+from util.registry.queuefile import QueueFile
-from util.queueprocess import QueueProcess
+from util.registry.queueprocess import QueueProcess
 from formats.squashed import SquashedDockerImage
 from formats.aci import ACIImage
 

@@ -1,5 +1,5 @@
 from app import app
-from util.streamlayerformat import StreamLayerMerger
+from util.registry.streamlayerformat import StreamLayerMerger
 from formats.tarimageformatter import TarImageFormatter
 
 import json

@@ -1,6 +1,6 @@
 from app import app
-from util.gzipwrap import GZIP_BUFFER_SIZE
+from util.registry.gzipwrap import GZIP_BUFFER_SIZE
-from util.streamlayerformat import StreamLayerMerger
+from util.registry.streamlayerformat import StreamLayerMerger
 from formats.tarimageformatter import TarImageFormatter
 
 import copy

@@ -1,5 +1,5 @@
 import tarfile
-from util.gzipwrap import GzipWrap
+from util.registry.gzipwrap import GzipWrap
 
 class TarImageFormatter(object):
   """ Base class for classes which produce a TAR containing image and layer data. """

@@ -1,7 +1,7 @@
 """ Swift storage driver. Based on: github.com/bacongobbler/docker-registry-driver-swift/ """
 from swiftclient.client import Connection, ClientException
 from storage.basestorage import BaseStorage
-from util.generatorfile import GeneratorFile
+from util.registry.generatorfile import GeneratorFile
 
 from random import SystemRandom
 import string

@@ -2,8 +2,8 @@ import unittest
 import tarfile
 
 from StringIO import StringIO
-from util.streamlayerformat import AUFS_WHITEOUT
+from util.registry.streamlayerformat import AUFS_WHITEOUT
-from util.changes import files_and_dirs_from_tar
+from util.registry.changes import files_and_dirs_from_tar
 
 class TestChanges(unittest.TestCase):
   def create_layer(self, **kwargs):

@@ -2,8 +2,8 @@ import unittest
 import tarfile
 
 from StringIO import StringIO
-from util.streamlayerformat import StreamLayerMerger, AUFS_WHITEOUT
+from util.registry.streamlayerformat import StreamLayerMerger, AUFS_WHITEOUT
-from util.tarlayerformat import TarLayerReadException
+from util.registry.tarlayerformat import TarLayerReadException
 
 class TestStreamLayerMerger(unittest.TestCase):
   def create_layer(self, **kwargs):

@@ -3,7 +3,7 @@ import unittest
 from itertools import islice
 
 from util.validation import generate_valid_usernames
-from util.generatorfile import GeneratorFile
+from util.registry.generatorfile import GeneratorFile
 
 class TestGeneratorFile(unittest.TestCase):
   def sample_generator(self):

@@ -16,7 +16,7 @@ from flask.ext.mail import Mail, Message
 from data.database import validate_database_url, User
 from storage import get_storage_driver
 from auth.auth_context import get_authenticated_user
-from util.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
+from util.config.oauth import GoogleOAuthConfig, GithubOAuthConfig, GitLabOAuthConfig
 from bitbucket import BitBucket
 
 from app import app, CONFIG_PROVIDER, get_app_url, OVERRIDE_CONFIG_DIRECTORY

util/migrate/__init__.py (new empty file)
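Each new subpackage directory ships an empty __init__.py (this one plus util/registry/__init__.py, util/saas/__init__.py, and util/security/__init__.py below). Python 2 has no implicit namespace packages, so the marker file is what lets the dotted imports used throughout this diff resolve. A minimal sketch of that dependency, using imports taken from the hunks on this page:

# Without the corresponding __init__.py, each of these raises
# "ImportError: No module named ..." under Python 2.
from util.migrate.backfill_user_uuids import backfill_user_uuids  # needs util/migrate/__init__.py
from util.registry.gzipstream import ZLIB_GZIP_WINDOW             # needs util/registry/__init__.py
from util.saas.cloudwatch import get_queue                        # needs util/saas/__init__.py
from util.security.signing import Signer                          # needs util/security/__init__.py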
@@ -5,7 +5,7 @@ from data import model
 from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db, db_random_func
-from util.gzipstream import ZLIB_GZIP_WINDOW
+from util.registry.gzipstream import ZLIB_GZIP_WINDOW
 
 
 logger = logging.getLogger(__name__)

util/registry/__init__.py (new empty file)
@@ -1,7 +1,7 @@
 import marisa_trie
 import os
 import tarfile
-from aufs import is_aufs_metadata, get_deleted_prefix
+from util.registry.aufs import is_aufs_metadata, get_deleted_prefix
 
 ALLOWED_TYPES = {tarfile.REGTYPE, tarfile.AREGTYPE}
 

@@ -1,8 +1,7 @@
 import marisa_trie
 import os
-import tarfile
-from aufs import is_aufs_metadata, get_deleted_prefix
-from util.tarlayerformat import TarLayerFormat
+from util.registry.aufs import is_aufs_metadata, get_deleted_prefix
+from util.registry.tarlayerformat import TarLayerFormat
 
 AUFS_METADATA = u'.wh..wh.'
 

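The two hunks above also drop the implicit relative import from aufs, which Python 2 resolves against the importing module's own directory (and which Python 3 removes entirely), in favor of the fully qualified path, so the modules keep working after the move into util/registry. A sketch of the pattern, assuming the importer lives in util/registry/:

# Before: implicit relative import; only works while the importer sits
# next to aufs.py, and breaks once the file moves (or under Python 3).
#   from aufs import is_aufs_metadata, get_deleted_prefix

# After: absolute import; resolves the same way from anywhere in the tree.
from util.registry.aufs import is_aufs_metadata, get_deleted_prefix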
@@ -2,8 +2,8 @@ import tarfile
 
 from cStringIO import StringIO
 
-from util.tarlayerformat import TarLayerFormat
+from util.registry.tarlayerformat import TarLayerFormat
-from util.gzipwrap import GzipWrap
+from util.registry.gzipwrap import GzipWrap
 
 class TarfileAppender(TarLayerFormat):
   """ Helper class which allows for appending entries to a gzipped-tarfile and doing so

util/saas/__init__.py (new empty file)
@@ -1,6 +1,6 @@
 import logging
 
-from util.cloudwatch import get_queue
+from util.saas.cloudwatch import get_queue
 
 
 logger = logging.getLogger(__name__)

util/security/__init__.py (new empty file)