Merge pull request #2551 from coreos-inc/structured-logs

Add log formatter class
Commit f0dd2e348b by Antoine Legrand, 2017-06-07 08:22:18 -07:00, committed by GitHub
27 changed files with 402 additions and 41 deletions
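
The util/log.py module added by this pull request is not among the hunks captured below, but every worker now calls logfile_path(debug=...) where it used to hard-code a logging config path. Judging purely from the call sites, the helper maps debug=True onto the old 'conf/logging_debug.conf' and debug=False onto 'conf/logging.conf'. A minimal sketch under those assumptions (the DEBUGLOG environment override is likewise an assumption, not something shown in this diff):

import os

def logfile_path(debug=False):
  # Hypothetical helper: pick the logging config each worker used to hard-code.
  # The DEBUGLOG environment override is an assumed convention, not part of this diff.
  if debug or os.getenv('DEBUGLOG', '').lower() == 'true':
    return 'conf/logging_debug.conf'
  return 'conf/logging.conf'

Centralizing the choice here means a structured (JSON) logging config can later be swapped in for every worker by changing only this function, instead of editing each __main__ block again.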

View file

@@ -7,7 +7,7 @@ from app import app, storage
 from data.database import UseThenDisconnect, BlobUpload
 from data import model
 from workers.worker import Worker
+from util.log import logfile_path
 logger = logging.getLogger(__name__)
@@ -51,6 +51,6 @@ class BlobUploadCleanupWorker(Worker):
 if __name__ == "__main__":
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   worker = BlobUploadCleanupWorker()
   worker.start()

View file

@@ -3,6 +3,7 @@ import time
 from app import app, storage, chunk_cleanup_queue
 from workers.queueworker import QueueWorker, JobException
+from util.log import logfile_path
 logger = logging.getLogger(__name__)
@@ -26,7 +27,7 @@ class ChunkCleanupWorker(QueueWorker):
 if __name__ == "__main__":
-  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False)
   engines = set([config[0] for config in app.config.get('DISTRIBUTED_STORAGE_CONFIG', {}).values()])
   if 'SwiftStorage' not in engines:

View file

@@ -7,6 +7,7 @@ from data.database import UseThenDisconnect
 from data import model
 from data.model.image import total_image_count
 from util.locking import GlobalLock, LockNotAcquiredException
+from util.log import logfile_path
 from workers.worker import Worker
 logger = logging.getLogger(__name__)
@@ -44,7 +45,7 @@ class GlobalPrometheusStatsWorker(Worker):
 def main():
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   if not app.config.get('PROMETHEUS_AGGREGATOR_URL'):
     logger.debug('Prometheus not enabled; skipping global stats reporting')

View file

@@ -13,6 +13,7 @@ from data.model.log import (get_stale_logs, get_stale_logs_start_id,
                             get_stale_logs_cutoff_id, delete_stale_logs)
 from data.userfiles import DelegateUserfiles
 from util.locking import GlobalLock, LockNotAcquiredException
+from util.log import logfile_path
 from util.streamingjsonencoder import StreamingJSONEncoder
 from workers.worker import Worker
@@ -102,7 +103,7 @@ def log_dict(log):
 def main():
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   if not features.ACTION_LOG_ROTATION or None in [SAVE_PATH, SAVE_LOCATION]:
     logger.debug('Action log rotation worker not enabled; skipping')

View file

@@ -13,6 +13,7 @@ from data.model.image import (get_images_eligible_for_scan, get_image_pk_field,
 from util.secscan.api import SecurityConfigValidator, APIRequestFailure
 from util.secscan.analyzer import LayerAnalyzer, PreemptedException
 from util.migrate.allocator import yield_random_entries
+from util.log import logfile_path
 from endpoints.v2 import v2_bp
@@ -109,6 +110,6 @@ if __name__ == '__main__':
     while True:
       time.sleep(100000)
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   worker = SecurityWorker()
   worker.start()

View file

@@ -1,3 +1,4 @@
+import os
 import logging
 import features
 import time
@@ -6,6 +7,7 @@ from app import app, storage, image_replication_queue
 from data.database import CloseForLongOperation
 from data import model
 from workers.queueworker import QueueWorker, WorkerUnhealthyException
+from util.log import logfile_path
 logger = logging.getLogger(__name__)
@@ -78,7 +80,7 @@ class StorageReplicationWorker(QueueWorker):
 if __name__ == "__main__":
-  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False)
   has_local_storage = False

View file

@@ -7,6 +7,7 @@ from app import app, authentication
 from data.users.teamsync import sync_teams_to_groups
 from workers.worker import Worker
 from util.timedeltastring import convert_to_timedelta
+from util.log import logfile_path
 logger = logging.getLogger(__name__)
@@ -25,7 +26,7 @@ class TeamSynchronizationWorker(Worker):
 def main():
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+  logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   if not features.TEAM_SYNCING or not authentication.federated_service:
     logger.debug('Team syncing is disabled; sleeping')

View file

@@ -13,7 +13,7 @@ from raven import Client
 from app import app
 from data.database import UseThenDisconnect
+from util.log import logfile_path
 logger = logging.getLogger(__name__)
@@ -63,7 +63,7 @@ class Worker(object):
     pass
  def start(self):
-    logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+    logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
     if not app.config.get('SETUP_COMPLETE', False):
      logger.info('Product setup is not yet complete; skipping worker startup')
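
The log formatter class named in the commit title is not shown in the hunks above (presumably it lives in the new util/log.py). For orientation only, a structured-log formatter in this style is usually a logging.Formatter subclass that serializes each record as one JSON object per line; the sketch below illustrates that pattern, and its class and field names are assumptions rather than the code this commit adds.

import json
import logging

class JsonFormatter(logging.Formatter):
  # Hypothetical structured formatter: emit each LogRecord as a single JSON line.
  def format(self, record):
    payload = {
      'time': self.formatTime(record),
      'level': record.levelname,
      'logger': record.name,
      'message': record.getMessage(),
    }
    if record.exc_info:
      payload['exception'] = self.formatException(record.exc_info)
    return json.dumps(payload)

Referenced from one of the conf/logging*.conf files that logfile_path() selects, a formatter like this turns the workers' output into machine-parseable structured logs without further changes to the worker code.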