From 5e6aa6648b695ca57483ae008c5bf5d4debeb6f3 Mon Sep 17 00:00:00 2001 From: Charlton Austin Date: Tue, 20 Jun 2017 15:50:46 -0400 Subject: [PATCH 01/22] fix(endpoints): added in proper error handling before we would return a 400 without a message because the errors were not being caught Issue: https://www.pivotaltracker.com/story/show/145459707 - [ ] It works! - [ ] Comments provide sufficient explanations for the next contributor - [ ] Tests cover changes and corner cases - [ ] Follows Quay syntax patterns and format --- data/model/label.py | 2 +- endpoints/api/manifest.py | 15 ++++++++++++--- test/test_api_usage.py | 14 ++++++++++++++ 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/data/model/label.py b/data/model/label.py index 467eca86f..aef0a9449 100644 --- a/data/model/label.py +++ b/data/model/label.py @@ -62,7 +62,7 @@ def create_manifest_label(tag_manifest, key, value, source_type_name, media_type media_type_id = _get_media_type_id(media_type_name) if media_type_id is None: - raise InvalidMediaTypeException + raise InvalidMediaTypeException() source_type_id = _get_label_source_type_id(source_type_name) diff --git a/endpoints/api/manifest.py b/endpoints/api/manifest.py index e96283f7f..66716da3f 100644 --- a/endpoints/api/manifest.py +++ b/endpoints/api/manifest.py @@ -10,6 +10,7 @@ from endpoints.exception import NotFound from data import model from digest import digest_tools +from util.validation import VALID_LABEL_KEY_REGEX BASE_MANIFEST_ROUTE = '/v1/repository//manifest/' MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN) @@ -92,9 +93,17 @@ class RepositoryManifestLabels(RepositoryParamResource): if label_validator.has_reserved_prefix(label_data['key']): abort(400, message='Label has a reserved prefix') - label = model.label.create_manifest_label(tag_manifest, label_data['key'], - label_data['value'], 'api', - media_type_name=label_data['media_type']) + label = None + try: + label = model.label.create_manifest_label(tag_manifest, label_data['key'], + label_data['value'], 'api', + media_type_name=label_data['media_type']) + except model.InvalidLabelKeyException: + abort(400, message='Label is of an invalid format or missing please use %s format for labels'.format( + VALID_LABEL_KEY_REGEX)) + except model.InvalidMediaTypeException: + abort(400, message='Media type is invalid please use a valid media type of text/plain or application/json') + metadata = { 'id': label.uuid, 'key': label_data['key'], diff --git a/test/test_api_usage.py b/test/test_api_usage.py index 14316d41e..75fed8888 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -4834,6 +4834,20 @@ class TestRepositoryManifestLabels(ApiTestCase): self.assertEquals(0, len(json['labels'])) + self.postJsonResponse(RepositoryManifestLabels, + params=dict(repository=repository, + manifestref=tag_manifest.digest), + data=dict(key='bad_label', value='world', + media_type='text/plain'), + expected_code=400) + + self.postJsonResponse(RepositoryManifestLabels, + params=dict(repository=repository, + manifestref=tag_manifest.digest), + data=dict(key='hello', value='world', + media_type='bad_media_type'), + expected_code=400) + # Add some labels to the manifest. 
with assert_action_logged('manifest_label_add'): label1 = self.postJsonResponse(RepositoryManifestLabels, From 31d518f3e1db706080904eb846bd6a48a3789da4 Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Tue, 6 Jun 2017 16:03:13 -0700 Subject: [PATCH 02/22] added Protractor for end-to-end testing --- package.json | 3 + .../manage-trigger.view-object.ts | 62 +++ static/test/e2e/sanity.scenario.ts | 19 + static/test/e2e/trigger-creation.scenario.ts | 155 ++++++++ static/test/protractor.conf.ts | 66 ++++ yarn.lock | 364 +++++++++++------- 6 files changed, 524 insertions(+), 145 deletions(-) create mode 100644 static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts create mode 100644 static/test/e2e/sanity.scenario.ts create mode 100644 static/test/e2e/trigger-creation.scenario.ts create mode 100644 static/test/protractor.conf.ts diff --git a/package.json b/package.json index a226306ca..11ffa8c8e 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "dev": "./node_modules/.bin/karma start --browsers ChromeHeadless", "test": "./node_modules/.bin/karma start --single-run --browsers ChromeHeadless", "test:node": "JASMINE_CONFIG_PATH=static/test/jasmine.json ./node_modules/.bin/jasmine-ts './static/js/**/*.spec.ts'", + "e2e": "./node_modules/.bin/ts-node ./node_modules/.bin/protractor static/test/protractor.conf.ts", "build": "NODE_ENV=production ./node_modules/.bin/webpack --progress", "watch": "./node_modules/.bin/webpack --watch" }, @@ -65,11 +66,13 @@ "karma-jasmine": "^0.3.8", "karma-webpack": "^1.8.1", "ngtemplate-loader": "^1.3.1", + "protractor": "^5.1.2", "script-loader": "^0.7.0", "source-map-loader": "0.1.5", "style-loader": "0.13.1", "ts-loader": "^0.9.5", "ts-mocks": "^0.2.2", + "ts-node": "^3.0.6", "typescript": "^2.2.1", "typings": "1.4.0", "webpack": "^2.2" diff --git a/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts b/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts new file mode 100644 index 000000000..3998cfb27 --- /dev/null +++ b/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts @@ -0,0 +1,62 @@ +import { element, by, browser, $, ElementFinder, ExpectedConditions as until } from 'protractor'; + + +export class ManageTriggerViewObject { + + public sections: {[name: string]: ElementFinder} = { + namespace: $('linear-workflow-section[section-id=namespace]'), + githostrepo: $('linear-workflow-section[section-id=repo][section-title="Select Repository"]'), + customrepo: $('linear-workflow-section[section-id=repo][section-title="Git Repository"]'), + triggeroptions: $('linear-workflow-section[section-id=triggeroptions]'), + dockerfilelocation: $('linear-workflow-section[section-id=dockerfilelocation]'), + contextlocation: $('linear-workflow-section[section-id=contextlocation]'), + robot: $('linear-workflow-section[section-id=robot]'), + verification: $('linear-workflow-section[section-id=verification]'), + }; + + private customGitRepoInput: ElementFinder = element(by.model('$ctrl.buildSource')); + private dockerfileLocationInput: ElementFinder = this.sections['dockerfilelocation'].$('input'); + private dockerfileLocationDropdownButton: ElementFinder = this.sections['dockerfilelocation'].$('button[data-toggle=dropdown'); + private dockerContextInput: ElementFinder = this.sections['contextlocation'].$('input'); + private dockerContextDropdownButton: ElementFinder = this.sections['contextlocation'].$('button[data-toggle=dropdown'); + private robotAccountOptions: ElementFinder = 
this.sections['robot'].element(by.repeater('$ctrl.orderedData.visibleEntries')); + + public continue(): Promise { + return Promise.resolve(element(by.buttonText('Continue')).click()); + } + + public enterRepositoryURL(url: string): Promise { + browser.wait(until.presenceOf(this.customGitRepoInput)); + this.customGitRepoInput.clear(); + + return Promise.resolve(this.customGitRepoInput.sendKeys(url)); + } + + public enterDockerfileLocation(path: string): Promise { + browser.wait(until.presenceOf(this.dockerfileLocationInput)); + this.dockerfileLocationInput.clear(); + + return Promise.resolve(this.dockerfileLocationInput.sendKeys(path)); + } + + public getDockerfileSuggestions(): Promise { + return Promise.resolve(this.dockerfileLocationDropdownButton.click()) + .then(() => element.all(by.repeater('$ctrl.paths')).map(result => result.getText())); + } + + public enterDockerContext(path: string): Promise { + browser.wait(until.presenceOf(this.dockerContextInput)); + this.dockerContextInput.clear(); + + return Promise.resolve(this.dockerContextInput.sendKeys(path)); + } + + public getDockerContextSuggestions(): Promise { + return Promise.resolve(this.dockerContextDropdownButton.click()) + .then(() => element.all(by.repeater('$ctrl.contexts')).map(result => result.getText())); + } + + public selectRobotAccount(index: number): Promise { + return Promise.resolve(element.all(by.css('input[type=radio]')).get(index).click()); + } +} diff --git a/static/test/e2e/sanity.scenario.ts b/static/test/e2e/sanity.scenario.ts new file mode 100644 index 000000000..c2400d242 --- /dev/null +++ b/static/test/e2e/sanity.scenario.ts @@ -0,0 +1,19 @@ +import { browser } from 'protractor'; +import { appHost } from '../protractor.conf'; + + +describe("sanity test", () => { + + beforeEach(() => { + browser.get(appHost); + }); + + it("loads home view with no AngularJS errors", () => { + browser.manage().logs().get('browser') + .then((browserLog: any) => { + browserLog.forEach((log: any) => { + expect(log.message).not.toContain("angular"); + }); + }); + }); +}); diff --git a/static/test/e2e/trigger-creation.scenario.ts b/static/test/e2e/trigger-creation.scenario.ts new file mode 100644 index 000000000..61f33084c --- /dev/null +++ b/static/test/e2e/trigger-creation.scenario.ts @@ -0,0 +1,155 @@ +import { browser, element, by, $, $$ } from 'protractor'; +import { ManageTriggerViewObject } from '../../js/directives/ui/manage-trigger/manage-trigger.view-object'; +import { appHost } from '../protractor.conf'; + + +describe("Trigger Creation", () => { + const username = 'devtable'; + const password = 'password'; + var manageTriggerView: ManageTriggerViewObject = new ManageTriggerViewObject(); + + beforeAll((done) => { + browser.waitForAngularEnabled(false); + + // Sign in + browser.get(appHost); + $$('a[href="/signin/"]').get(1).click(); + $('#signin-username').sendKeys(username); + $('#signin-password').sendKeys(password); + element(by.partialButtonText('Sign in')).click(); + browser.sleep(4000).then(() => done()); + }); + + afterAll(() => { + browser.waitForAngularEnabled(true); + // TODO(alecmerdler): Delete all created triggers + }); + + describe("for custom git", () => { + + beforeAll(() => { + // Navigate to trigger setup + browser.get(`${appHost}/repository/devtable/simple?tab=builds`) + }); + + it("can select custom git repository push as a trigger option", (done) => { + element(by.buttonText('Create Build Trigger')).click(); + element(by.linkText('Custom Git Repository Push')).click(); + browser.sleep(1000); + done(); 
+ }); + + it("shows custom git repository section first", () => { + expect(manageTriggerView.sections['customrepo'].isDisplayed()).toBe(true); + }); + + it("does not accept invalid custom git repository URL's", () => { + manageTriggerView.continue() + .then(() => fail('Should not accept empty input for repository URL')) + .catch(() => manageTriggerView.enterRepositoryURL('git@some')) + .then(() => manageTriggerView.continue()) + .then(() => fail('Should not accept invalid input for repository URL')) + .catch(() => null); + }); + + it("proceeds to Dockerfile location section when given valid URL", () => { + manageTriggerView.enterRepositoryURL('git@somegit.com:someuser/somerepo.git'); + manageTriggerView.continue() + .then(() => { + expect(manageTriggerView.sections['dockerfilelocation'].isDisplayed()).toBe(true); + }) + .catch(reason => fail(reason)); + }); + + it("does not accept Dockerfile location that does not end with a filename", () => { + manageTriggerView.enterDockerfileLocation('/') + .then(() => manageTriggerView.continue()) + .then(() => fail('Should not accept Dockerfile location that does not end with a filename')) + .catch(() => null); + }); + + it("does not provide Dockerfile location suggestions", () => { + manageTriggerView.getDockerfileSuggestions() + .then((results) => { + expect(results.length).toEqual(0); + }); + }); + + it("proceeds to Docker context location section when given a valid Dockerfile location", () => { + manageTriggerView.enterDockerfileLocation('/Dockerfile') + .then(() => manageTriggerView.continue()) + .then(() => { + expect(manageTriggerView.sections['contextlocation'].isDisplayed()).toBe(true); + }) + .catch(reason => fail(reason)); + }); + + it("does not accept invalid Docker context", () => { + manageTriggerView.enterDockerContext('') + .then(() => manageTriggerView.continue()) + .then(() => fail('Should not acccept invalid Docker context location')) + .catch(() => null); + }); + + it("provides suggestions for Docker context based on Dockerfile location", () => { + manageTriggerView.getDockerContextSuggestions() + .then((results) => { + expect(results).toContain('/'); + }); + }); + + it("proceeds to robot selection section when given valid Docker context", () => { + manageTriggerView.enterDockerContext('/') + .then(() => manageTriggerView.continue()) + .then(() => { + expect(manageTriggerView.sections['robot'].isDisplayed()).toBe(true); + }) + .catch(reason => fail(reason)); + }); + + it("allows selection of optional robot account", () => { + manageTriggerView.selectRobotAccount(0) + .catch(reason => fail(reason)); + }); + + it("proceeds to verification section", () => { + manageTriggerView.continue() + .then(() => { + expect(manageTriggerView.sections['verification'].isDisplayed()).toBe(true); + }) + .catch(reason => fail(reason)); + }); + + it("displays success message after creating the trigger", () => { + manageTriggerView.continue() + .then(() => { + browser.sleep(2000); + expect($('h3').getText()).toEqual('Trigger has been successfully activated'); + }) + .catch(reason => fail(reason)); + }); + }); + + describe("for githost", () => { + + beforeAll(() => { + // Navigate to trigger setup + browser.get(`${appHost}/repository/devtable/simple?tab=builds`); + }); + + it("can select GitHub repository push as a trigger option", () => { + element(by.partialButtonText('Create Build Trigger')).click(); + element(by.linkText('GitHub Repository Push')).click(); + }); + + it("redirects to GitHub login page for granting authentication", () => { + 
expect(browser.getCurrentUrl()).toContain('github.com'); + + // TODO: Which credentials do we use to login to GitHub? + }); + + xit("shows namespace select section first", () => { + expect(manageTriggerView.sections['namespace'].isDisplayed()).toBe(true); + }); + }); +}); diff --git a/static/test/protractor.conf.ts b/static/test/protractor.conf.ts new file mode 100644 index 000000000..9016c1743 --- /dev/null +++ b/static/test/protractor.conf.ts @@ -0,0 +1,66 @@ +import { Config, browser } from 'protractor'; +import * as request from 'request'; + + +/* +* Use a set environment variable or default value for the app host. +*/ +export const appHost: string = process.env.APP_HOST || 'http://localhost:5000'; + + +/** + * Protractor is configured to run against a Selenium instance running locally on port 4444 and a Quay instance running + * locally on port 5000. + * Easiest method is running the Quay and Selenium containers: + * $ docker run -d --net=host -v /dev/shm:/dev/shm selenium/standalone-chrome:3.4.0 + * $ docker run -d --net=host quay.io/quay/quay + * $ yarn run e2e + */ +export const config: Config = { + framework: 'jasmine', + seleniumAddress: 'http://localhost:4444/wd/hub', + // Uncomment to run tests against local Chrome instance + // directConnect: true, + capabilities: { + browserName: 'chrome', + chromeOptions: { + args: [ + '--disable-infobars' + ], + prefs: { + 'profile.password_manager_enabled': false, + 'credentials_enable_service': false, + 'password_manager_enabled': false + } + } + }, + onPrepare: () => { + browser.driver.manage().window().maximize(); + + // Resolve promise when request returns HTTP 200 + return new Promise((resolve, reject) => { + const pollServer = (success, failure) => { + request(appHost, (error, response, body) => { + if (!error && response.statusCode == 200) { + console.log(`Successfully connected to server at ${appHost}`); + success(); + } else { + console.log(`Could not connect to server at ${appHost}`); + setTimeout(() => { + failure(success, failure); + }, 5000); + } + }); + }; + + pollServer(resolve, pollServer); + }); + }, + onComplete: () => { + browser.close(); + }, + specs: [ + './e2e/sanity.scenario.ts', + './e2e/trigger-creation.scenario.ts' + ], +}; diff --git a/yarn.lock b/yarn.lock index 47165470b..61c278bb0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -44,6 +44,10 @@ version "6.0.78" resolved "https://registry.yarnpkg.com/@types/node/-/node-6.0.78.tgz#5d4a3f579c1524e01ee21bf474e6fba09198f470" +"@types/q@^0.0.32": + version "0.0.32" + resolved "https://registry.yarnpkg.com/@types/q/-/q-0.0.32.tgz#bd284e57c84f1325da702babfc82a5328190c0c5" + "@types/react-dom@0.14.17": version "0.14.17" resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-0.14.17.tgz#d8b0dec27e873c218d9075856c6ca1c5db956d5d" @@ -54,14 +58,14 @@ version "0.14.39" resolved "https://registry.yarnpkg.com/@types/react/-/react-0.14.39.tgz#11cb715768da5f7605aa2030a5dc63e77a137eb5" +"@types/selenium-webdriver@^2.53.35", "@types/selenium-webdriver@~2.53.39": + version "2.53.42" + resolved "https://registry.yarnpkg.com/@types/selenium-webdriver/-/selenium-webdriver-2.53.42.tgz#74cb77fb6052edaff2a8984ddafd88d419f25cac" + "@types/showdown@^1.4.32": version "1.4.32" resolved "https://registry.yarnpkg.com/@types/showdown/-/showdown-1.4.32.tgz#bb0b32dbafee23ae9575df30b227e4fc2f0cd45b" -abab@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.3.tgz#b81de5f7274ec4e756d797cd834f303642724e5d" - abbrev@1, abbrev@1.0.x: version "1.0.9" resolved 
"https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" @@ -79,16 +83,18 @@ acorn-dynamic-import@^2.0.0: dependencies: acorn "^4.0.3" -acorn-globals@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" - dependencies: - acorn "^4.0.4" - acorn@^4.0.3, acorn@^4.0.4: version "4.0.11" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.11.tgz#edcda3bd937e7556410d42ed5860f67399c794c0" +adm-zip@0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.4.tgz#a61ed5ae6905c3aea58b3a657d25033091052736" + +adm-zip@^0.4.7: + version "0.4.7" + resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.7.tgz#8606c2cbf1c426ce8c8ec00174447fd49b6eafc1" + after@0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" @@ -213,10 +219,6 @@ arr-flatten@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.0.1.tgz#e5ffe54d45e19f32f216e91eb99c8ce892bb604b" -array-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" - array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" @@ -225,7 +227,13 @@ array-slice@^0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5" -array-uniq@^1.0.2: +array-union@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + dependencies: + array-uniq "^1.0.1" + +array-uniq@^1.0.1, array-uniq@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" @@ -374,6 +382,12 @@ block-stream@*: dependencies: inherits "~2.0.0" +blocking-proxy@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/blocking-proxy/-/blocking-proxy-0.0.5.tgz#462905e0dcfbea970f41aa37223dda9c07b1912b" + dependencies: + minimist "^1.2.0" + bluebird@^3.1.1, bluebird@^3.3.0: version "3.4.7" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3" @@ -821,10 +835,6 @@ constants-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" -content-type-parser@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.1.tgz#c3e56988c53c65127fb46d4032a3a900246fdc94" - content-type@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed" @@ -968,16 +978,6 @@ csso@~2.3.1: clap "^1.0.9" source-map "^0.5.3" -cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.2.tgz#b8036170c79f07a90ff2f16e22284027a243848b" - -"cssstyle@>= 0.2.37 < 0.3.0": - version "0.2.37" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-0.2.37.tgz#541097234cb2513c83ceed3acddc27ff27987d54" - dependencies: - cssom "0.3.x" - currently-unhandled@^0.4.1: version "0.4.1" resolved 
"https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" @@ -1049,6 +1049,18 @@ defined@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" +del@^2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" + dependencies: + globby "^5.0.0" + is-path-cwd "^1.0.0" + is-path-in-cwd "^1.0.0" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + rimraf "^2.2.8" + delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" @@ -1257,7 +1269,7 @@ escape-string-regexp@^1.0.2: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" -escodegen@1.8.x, escodegen@^1.6.1: +escodegen@1.8.x: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" dependencies: @@ -1516,7 +1528,7 @@ glob@^5.0.15: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.5, glob@^7.0.6, glob@^7.1.1: +glob@^7.0.3, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" dependencies: @@ -1527,6 +1539,17 @@ glob@^7.0.5, glob@^7.0.6, glob@^7.1.1: once "^1.3.0" path-is-absolute "^1.0.0" +globby@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + good-listener@^1.2.0: version "1.2.2" resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50" @@ -1653,12 +1676,6 @@ html-comment-regex@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.1.tgz#668b93776eaae55ebde8f3ad464b307a4963625e" -html-encoding-sniffer@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.1.tgz#79bf7a785ea495fe66165e734153f363ff5437da" - dependencies: - whatwg-encoding "^1.0.1" - html-loader@^0.4.5: version "0.4.5" resolved "https://registry.yarnpkg.com/html-loader/-/html-loader-0.4.5.tgz#5fbcd87cd63a5c49a7fce2fe56f425e05729c68c" @@ -1726,10 +1743,6 @@ https-proxy-agent@^1.0.0: debug "2" extend "3" -iconv-lite@0.4.13: - version "0.4.13" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.13.tgz#1f88aba4ab0b1508e8312acc39345f36e992e2f2" - iconv-lite@0.4.15: version "0.4.15" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.15.tgz#fe265a218ac6a57cfe854927e9d04c19825eddeb" @@ -1775,7 +1788,7 @@ inherits@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" -ini@~1.3.0: +ini@^1.3.4, ini@~1.3.0: version "1.3.4" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" @@ -1887,6 +1900,22 @@ is-obj@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" +is-path-cwd@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + +is-path-in-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" + dependencies: + is-path-inside "^1.0.0" + +is-path-inside@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.0.tgz#fc06e5a1683fbda13de667aff717bbc10a48f37f" + dependencies: + path-is-inside "^1.0.1" + is-plain-obj@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" @@ -2002,7 +2031,7 @@ jasmine-ts@0.0.3: ts-node "^1.2.1" typescript "^2.0.0" -jasmine@^2.4.1: +jasmine@^2.4.1, jasmine@^2.5.3: version "2.5.3" resolved "https://registry.yarnpkg.com/jasmine/-/jasmine-2.5.3.tgz#5441f254e1fc2269deb1dfd93e0e57d565ff4d22" dependencies: @@ -2010,6 +2039,10 @@ jasmine@^2.4.1: glob "^7.0.6" jasmine-core "~2.5.2" +jasminewd2@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/jasminewd2/-/jasminewd2-2.1.0.tgz#da595275d1ae631de736ac0a7c7d85c9f73ef652" + jodid25519@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" @@ -2046,32 +2079,6 @@ jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" -jsdom@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-11.0.0.tgz#1ee507cb2c0b16c875002476b1a8557d951353e5" - dependencies: - abab "^1.0.3" - acorn "^4.0.4" - acorn-globals "^3.1.0" - array-equal "^1.0.0" - content-type-parser "^1.0.1" - cssom ">= 0.3.2 < 0.4.0" - cssstyle ">= 0.2.37 < 0.3.0" - escodegen "^1.6.1" - html-encoding-sniffer "^1.0.1" - nwmatcher ">= 1.3.9 < 2.0.0" - parse5 "^3.0.2" - pn "^1.0.0" - request "^2.79.0" - request-promise-native "^1.0.3" - sax "^1.2.1" - symbol-tree "^3.2.1" - tough-cookie "^2.3.2" - webidl-conversions "^4.0.0" - whatwg-encoding "^1.0.1" - whatwg-url "^4.3.0" - xml-name-validator "^2.0.1" - jsesc@^0.5.0, jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" @@ -2145,10 +2152,6 @@ karma-jasmine@^0.3.8: version "0.3.8" resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-0.3.8.tgz#5b6457791ad9b89aa173f079e3ebe1b8c805236c" -karma-jsdom-launcher@^6.1.2: - version "6.1.2" - resolved "https://registry.yarnpkg.com/karma-jsdom-launcher/-/karma-jsdom-launcher-6.1.2.tgz#f44ad3986df096463e5bfb437dc9f35db9917a05" - karma-webpack@^1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-1.8.1.tgz#39d5fd2edeea3cc3ef5b405989b37d5b0e6a3b4e" @@ -2304,7 +2307,7 @@ lodash@^3.8.0: version "3.10.1" resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" -lodash@^4.13.1, lodash@^4.14.0, lodash@^4.5.0, lodash@~4.17.0: +lodash@^4.0.0, lodash@^4.14.0, lodash@^4.5.0, lodash@~4.17.0: version "4.17.4" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" @@ -2621,10 +2624,6 @@ number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" -"nwmatcher@>= 1.3.9 < 2.0.0": - version "1.4.0" - resolved 
"https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.0.tgz#b4389362170e7ef9798c3c7716d80ebc0106fccf" - oauth-sign@~0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" @@ -2672,7 +2671,7 @@ once@~1.3.3: dependencies: wrappy "1" -optimist@^0.6.1: +optimist@^0.6.1, optimist@~0.6.0: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" dependencies: @@ -2763,12 +2762,6 @@ parse-json@^2.1.0, parse-json@^2.2.0: dependencies: error-ex "^1.2.0" -parse5@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-3.0.2.tgz#05eff57f0ef4577fb144a79f8b9a967a6cc44510" - dependencies: - "@types/node" "^6.0.46" - parsejson@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/parsejson/-/parsejson-0.0.3.tgz#ab7e3759f209ece99437973f7d0f1f64ae0e64ab" @@ -2805,6 +2798,10 @@ path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" +path-is-inside@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" @@ -2833,10 +2830,6 @@ pinkie@^2.0.0, pinkie@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" -pn@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/pn/-/pn-1.0.0.tgz#1cf5a30b0d806cd18f88fc41a6b5d4ad615b3ba9" - popsicle-proxy-agent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/popsicle-proxy-agent/-/popsicle-proxy-agent-3.0.0.tgz#b9133c55d945759ab7ee61b7711364620d3aeadc" @@ -3140,6 +3133,26 @@ promise-finally@^2.0.1: dependencies: any-promise "^1.3.0" +protractor@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/protractor/-/protractor-5.1.2.tgz#9b221741709a4c62d5cd53c6aadd54a71137e95f" + dependencies: + "@types/node" "^6.0.46" + "@types/q" "^0.0.32" + "@types/selenium-webdriver" "~2.53.39" + blocking-proxy "0.0.5" + chalk "^1.1.3" + glob "^7.0.3" + jasmine "^2.5.3" + jasminewd2 "^2.1.0" + optimist "~0.6.0" + q "1.4.1" + saucelabs "~1.3.0" + selenium-webdriver "3.0.1" + source-map-support "~0.4.0" + webdriver-js-extender "^1.0.0" + webdriver-manager "^12.0.6" + prr@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" @@ -3162,7 +3175,7 @@ punycode@^1.2.4, punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" -q@^1.1.2: +q@1.4.1, q@^1.1.2, q@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/q/-/q-1.4.1.tgz#55705bcd93c5f3673530c2c2cbc0c2b3addc286e" @@ -3407,21 +3420,7 @@ repeating@^2.0.0: dependencies: is-finite "^1.0.0" -request-promise-core@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.1.tgz#3eee00b2c5aa83239cfb04c5700da36f81cd08b6" - dependencies: - lodash "^4.13.1" - -request-promise-native@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.4.tgz#86988ec8eee408e45579fce83bfd05b3adf9a155" - dependencies: - request-promise-core "1.1.1" - 
stealthy-require "^1.1.0" - tough-cookie ">=2.3.0" - -request@^2.79.0: +request@^2.78.0, request@^2.79.0: version "2.79.0" resolved "https://registry.yarnpkg.com/request/-/request-2.79.0.tgz#4dfe5bf6be8b8cdc37fcf93e04b65577722710de" dependencies: @@ -3474,7 +3473,7 @@ right-align@^0.1.1: dependencies: align-text "^0.1.1" -rimraf@2, rimraf@^2.4.4, rimraf@^2.6.0: +rimraf@2, rimraf@^2.2.8, rimraf@^2.4.4, rimraf@^2.5.2, rimraf@^2.5.4, rimraf@^2.6.0: version "2.6.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" dependencies: @@ -3500,7 +3499,17 @@ safe-buffer@^5.0.1: version "5.1.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.0.tgz#fe4c8460397f9eaaaa58e73be46273408a45e223" -sax@^1.2.1, sax@~1.2.1: +saucelabs@~1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/saucelabs/-/saucelabs-1.3.0.tgz#d240e8009df7fa87306ec4578a69ba3b5c424fee" + dependencies: + https-proxy-agent "^1.0.0" + +sax@0.6.x: + version "0.6.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-0.6.1.tgz#563b19c7c1de892e09bfc4f2fc30e3c27f0952b9" + +sax@>=0.6.0, sax@~1.2.1: version "1.2.2" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828" @@ -3514,13 +3523,32 @@ select@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" +selenium-webdriver@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/selenium-webdriver/-/selenium-webdriver-3.0.1.tgz#a2dea5da4a97f6672e89e7ca7276cefa365147a7" + dependencies: + adm-zip "^0.4.7" + rimraf "^2.5.4" + tmp "0.0.30" + xml2js "^0.4.17" + +selenium-webdriver@^2.53.2: + version "2.53.3" + resolved "https://registry.yarnpkg.com/selenium-webdriver/-/selenium-webdriver-2.53.3.tgz#d29ff5a957dff1a1b49dc457756e4e4bfbdce085" + dependencies: + adm-zip "0.4.4" + rimraf "^2.2.8" + tmp "0.0.24" + ws "^1.0.1" + xml2js "0.4.4" + semver-diff@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" dependencies: semver "^5.0.3" -"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.0.3, semver@^5.1.0, semver@~5.3.0: +"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@~5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" @@ -3642,7 +3670,7 @@ source-map-loader@0.1.5: loader-utils "~0.2.2" source-map "~0.1.33" -source-map-support@^0.4.0: +source-map-support@^0.4.0, source-map-support@~0.4.0: version "0.4.11" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.11.tgz#647f939978b38535909530885303daf23279f322" dependencies: @@ -3707,10 +3735,6 @@ sshpk@^1.7.0: version "1.3.1" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e" -stealthy-require@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" - stream-browserify@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.1.tgz#66266ee5f9bdb9940a4e4514cafb43bb71e5c9db" @@ -3764,6 +3788,10 @@ strip-bom@^2.0.0: dependencies: is-utf8 "^0.2.0" +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + 
strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" @@ -3806,10 +3834,6 @@ symbol-observable@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.4.tgz#29bf615d4aa7121bdd898b22d4b3f9bc4e2aa03d" -symbol-tree@^3.2.1: - version "3.2.2" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" - tapable@^0.1.8: version "0.1.10" resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.1.10.tgz#29c35707c2b70e50d07482b5d202e8ed446dafd4" @@ -3873,6 +3897,16 @@ tiny-emitter@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-1.2.0.tgz#6dc845052cb08ebefc1874723b58f24a648c3b6f" +tmp@0.0.24: + version "0.0.24" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.24.tgz#d6a5e198d14a9835cc6f2d7c3d9e302428c8cf12" + +tmp@0.0.30: + version "0.0.30" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.30.tgz#72419d4a8be7d6ce75148fd8b324e593a711c2ed" + dependencies: + os-tmpdir "~1.0.1" + tmp@0.0.31, tmp@0.0.x: version "0.0.31" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.31.tgz#8f38ab9438e17315e5dbd8b3657e8bfb277ae4a7" @@ -3893,16 +3927,12 @@ touch@^1.0.0: dependencies: nopt "~1.0.10" -tough-cookie@>=2.3.0, tough-cookie@^2.0.0, tough-cookie@^2.3.2, tough-cookie@~2.3.0: +tough-cookie@^2.0.0, tough-cookie@~2.3.0: version "2.3.2" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" dependencies: punycode "^1.4.1" -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" @@ -3939,6 +3969,21 @@ ts-node@^1.2.1: xtend "^4.0.0" yn "^1.2.0" +ts-node@^3.0.6: + version "3.0.6" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-3.0.6.tgz#55127ff790c7eebf6ba68c1e6dde94b09aaa21e0" + dependencies: + arrify "^1.0.0" + chalk "^1.1.1" + diff "^3.1.0" + make-error "^1.1.1" + minimist "^1.2.0" + mkdirp "^0.5.1" + source-map-support "^0.4.0" + tsconfig "^6.0.0" + v8flags "^2.0.11" + yn "^2.0.0" + tsconfig@^5.0.2: version "5.0.3" resolved "https://registry.yarnpkg.com/tsconfig/-/tsconfig-5.0.3.tgz#5f4278e701800967a8fc383fd19648878f2a6e3a" @@ -3948,6 +3993,13 @@ tsconfig@^5.0.2: strip-bom "^2.0.0" strip-json-comments "^2.0.0" +tsconfig@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/tsconfig/-/tsconfig-6.0.0.tgz#6b0e8376003d7af1864f8df8f89dd0059ffcd032" + dependencies: + strip-bom "^3.0.0" + strip-json-comments "^2.0.0" + tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" @@ -4207,13 +4259,28 @@ wcwidth@^1.0.0: dependencies: defaults "^1.0.3" -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" +webdriver-js-extender@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/webdriver-js-extender/-/webdriver-js-extender-1.0.0.tgz#81c533a9e33d5bfb597b4e63e2cdb25b54777515" + dependencies: + "@types/selenium-webdriver" "^2.53.35" + selenium-webdriver "^2.53.2" -webidl-conversions@^4.0.0: - version 
"4.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.1.tgz#8015a17ab83e7e1b311638486ace81da6ce206a0" +webdriver-manager@^12.0.6: + version "12.0.6" + resolved "https://registry.yarnpkg.com/webdriver-manager/-/webdriver-manager-12.0.6.tgz#3df1a481977010b4cbf8c9d85c7a577828c0e70b" + dependencies: + adm-zip "^0.4.7" + chalk "^1.1.1" + del "^2.2.0" + glob "^7.0.3" + ini "^1.3.4" + minimist "^1.2.0" + q "^1.4.1" + request "^2.78.0" + rimraf "^2.5.2" + semver "^5.3.0" + xml2js "^0.4.17" webpack-dev-middleware@^1.0.11: version "1.10.1" @@ -4256,19 +4323,6 @@ webpack@^2.2: webpack-sources "^0.1.4" yargs "^6.0.0" -whatwg-encoding@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.1.tgz#3c6c451a198ee7aec55b1ec61d0920c67801a5f4" - dependencies: - iconv-lite "0.4.13" - -whatwg-url@^4.3.0: - version "4.8.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-4.8.0.tgz#d2981aa9148c1e00a41c5a6131166ab4683bbcc0" - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - whet.extend@~0.9.9: version "0.9.9" resolved "https://registry.yarnpkg.com/whet.extend/-/whet.extend-0.9.9.tgz#f877d5bf648c97e5aa542fadc16d6a259b9c11a1" @@ -4330,7 +4384,7 @@ write-file-atomic@^1.1.2: imurmurhash "^0.1.4" slide "^1.1.5" -ws@1.1.2: +ws@1.1.2, ws@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.2.tgz#8a244fa052401e08c9886cf44a85189e1fd4067f" dependencies: @@ -4351,9 +4405,25 @@ xml-char-classes@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/xml-char-classes/-/xml-char-classes-1.0.0.tgz#64657848a20ffc5df583a42ad8a277b4512bbc4d" -xml-name-validator@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-2.0.1.tgz#4d8b8f1eccd3419aa362061becef515e1e559635" +xml2js@0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.4.tgz#3111010003008ae19240eba17497b57c729c555d" + dependencies: + sax "0.6.x" + xmlbuilder ">=1.0.0" + +xml2js@^0.4.17: + version "0.4.17" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.17.tgz#17be93eaae3f3b779359c795b419705a8817e868" + dependencies: + sax ">=0.6.0" + xmlbuilder "^4.1.0" + +xmlbuilder@>=1.0.0, xmlbuilder@^4.1.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-4.2.1.tgz#aa58a3041a066f90eaa16c2f5389ff19f3f461a5" + dependencies: + lodash "^4.0.0" xmlhttprequest-ssl@1.5.3: version "1.5.3" @@ -4408,6 +4478,10 @@ yn@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/yn/-/yn-1.2.0.tgz#d237a4c533f279b2b89d3acac2db4b8c795e4a63" +yn@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" + zeroclipboard@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/zeroclipboard/-/zeroclipboard-2.3.0.tgz#592ebd833a4308688b0739697d3dbf989002c9af" From f5c1ae071936412bae8f47cb6c1b0d30c2d393bc Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Fri, 23 Jun 2017 11:27:07 -0700 Subject: [PATCH 03/22] fixed AngularJS escaping '?' 
character in URL --- static/js/services/plan-service.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/js/services/plan-service.js b/static/js/services/plan-service.js index 79d2a0a1b..af5716a57 100644 --- a/static/js/services/plan-service.js +++ b/static/js/services/plan-service.js @@ -67,9 +67,9 @@ function(KeyService, UserService, CookieService, ApiService, Features, Config, $ planService.getPlan(planId, function(plan) { if (planService.isOrgCompatible(plan)) { - $location.path('/organizations/new/?plan=' + planId); + $location.path('/organizations/new').search('plan', planId); } else { - $location.path('/user?plan=' + planId); + $location.path('/user').search('plan', planId); } }); }); From 7b72cf8b27596fcaf29b8b6b788108b76b06f46b Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Fri, 23 Jun 2017 16:59:39 -0400 Subject: [PATCH 04/22] Small fix for georeplication and add better logs Previously, if we attempted to georeplicate storage from the existing location and, somehow, that existing location did not exist, we'd still mark the new location as invalid. This is a major problem for storage engines that are not consistent. Now, we first try a back off strategy to find the image in the existing storage and, as well, if the replication fails in any way, we log it. --- workers/storagereplication.py | 68 ++++++++++++++++++++++++----------- 1 file changed, 48 insertions(+), 20 deletions(-) diff --git a/workers/storagereplication.py b/workers/storagereplication.py index 42e83b1d2..9eb3a9bd9 100644 --- a/workers/storagereplication.py +++ b/workers/storagereplication.py @@ -1,8 +1,8 @@ -import os import logging -import features import time +import features + from app import app, storage, image_replication_queue from data.database import CloseForLongOperation from data import model @@ -17,17 +17,27 @@ RESERVATION_SECONDS = app.config.get('STORAGE_REPLICATION_PROCESSING_SECONDS', 6 class StorageReplicationWorker(QueueWorker): def process_queue_item(self, job_details): storage_uuid = job_details['storage_id'] - logger.debug('Starting replication of image storage %s', storage_uuid) + namespace_id = job_details['namespace_user_id'] - namespace = model.user.get_namespace_user_by_user_id(job_details['namespace_user_id']) - if not self.replicate_storage(namespace, storage_uuid): + logger.debug('Starting replication of image storage %s under namespace %s', storage_uuid, + namespace_id) + try: + namespace = model.user.get_namespace_user_by_user_id(namespace_id) + except model.user.InvalidUsernameException: + logger.exception('Exception when looking up namespace %s for replication of image storage %s', + namespace_id, storage_uuid) + return + + succeeded = self.replicate_storage(namespace, storage_uuid) + logger.debug('Replication finished of image storage %s under namespace %s: %s', + storage_uuid, namespace_id, succeeded) + if not succeeded: raise WorkerUnhealthyException() - def replicate_storage(self, namespace, storage_uuid): # Lookup the namespace and its associated regions. 
if not namespace: - logger.debug('Unknown namespace: %s', namespace) + logger.debug('Unknown namespace when trying to replicate storage %s', storage_uuid) return True locations = model.user.get_region_locations(namespace) @@ -42,26 +52,44 @@ class StorageReplicationWorker(QueueWorker): locations_required = locations | set(storage.default_locations) locations_missing = locations_required - set(partial_storage.locations) + logger.debug('For replication of storage %s under namespace %s: %s required; %s missing', + storage_uuid, namespace.username, locations_required, locations_missing) + if not locations_missing: - logger.debug('No missing locations for storage %s under namespace %s', - storage_uuid, namespace.username) + logger.debug('No missing locations for storage %s under namespace %s. Required: %s', + storage_uuid, namespace.username, locations_required) return True # For any missing storage locations, initiate a copy. existing_location = list(partial_storage.locations)[0] + path_to_copy = model.storage.get_layer_path(partial_storage) + # Lookup the existing location. If not found, progressively sleep a few times to handle the case + # of not fully consistent storage. + for retry in range(0, 3): + if storage.exists([existing_location], path_to_copy): + break + + logger.debug('Cannot find image storage %s in existing location %s (try #%s)', + storage_uuid, existing_location, retry) + time.sleep(pow(2, retry) * 5) + + if not storage.exists([existing_location], path_to_copy): + logger.warning('Cannot find image storage %s in existing location %s; stopping replication', + storage_uuid, existing_location) + return False + + # For each missing location, copy over the storage. for location in locations_missing: - logger.debug('Copying image storage %s to location %s', partial_storage.uuid, location) + logger.debug('Starting copy of storage %s to location %s from %s', partial_storage.uuid, + location, existing_location) # Copy the binary data. - path_to_copy = model.storage.get_layer_path(partial_storage) copied = False - try: - if storage.exists([existing_location], path_to_copy): - with CloseForLongOperation(app.config): - storage.copy_between(path_to_copy, existing_location, location) - copied = True + with CloseForLongOperation(app.config): + storage.copy_between(path_to_copy, existing_location, location) + copied = True except: logger.exception('Exception when copying path %s of image storage %s to location %s', path_to_copy, partial_storage.uuid, location) @@ -71,11 +99,11 @@ class StorageReplicationWorker(QueueWorker): # completed. 
if copied: model.storage.add_storage_placement(partial_storage, location) - logger.debug('Finished copy of image storage %s to location %s', - partial_storage.uuid, location) + logger.debug('Finished copy of image storage %s to location %s from %s', + partial_storage.uuid, location, existing_location) - logger.debug('Completed replication of image storage %s to locations %s', - partial_storage.uuid, locations_missing) + logger.debug('Completed replication of image storage %s to locations %s from %s', + partial_storage.uuid, locations_missing, existing_location) return True From e51436239452e50748596c7edf44e0fc4970c439 Mon Sep 17 00:00:00 2001 From: Antoine Legrand <2t.antoine@gmail.com> Date: Mon, 26 Jun 2017 15:21:59 +0200 Subject: [PATCH 05/22] use-docker host dns for CI --- .gitlab-ci.jsonnet | 1 + .gitlab-ci.yml | 21 +++++++++------------ .gitlab-ci/base_jobs.libsonnet | 10 +++++----- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/.gitlab-ci.jsonnet b/.gitlab-ci.jsonnet index 38303cfd6..7b4f63fff 100644 --- a/.gitlab-ci.jsonnet +++ b/.gitlab-ci.jsonnet @@ -13,6 +13,7 @@ local stages_list = [ 'docker_release', 'teardown', ]; + local stages = utils.set(stages_list); // List CI jobs diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 5374fd23c..6f71be9be 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -11,13 +11,12 @@ container-base-build: script: - docker build --cache-from quay.io/quay/quay-base:latest -t quay.io/quay/quay-base:latest -f quay-base.dockerfile . - docker push quay.io/quay/quay-base:latest - services: - - docker:dind stage: docker_base tags: - - docker + - kubernetes variables: - DOCKER_DRIVER: aufs + DOCKER_DRIVER: overlay + DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375 container-build: before_script: - docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io @@ -25,13 +24,12 @@ container-build: script: - docker build -t quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} -f quay.dockerfile . 
- docker push quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} - services: - - docker:dind stage: docker_build tags: - - docker + - kubernetes variables: - DOCKER_DRIVER: aufs + DOCKER_DRIVER: overlay + DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375 container-release: before_script: - docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io @@ -43,13 +41,12 @@ container-release: - docker pull quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} - docker tag quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG} quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}-${CI_COMMIT_SHA} - docker push quay.io/quay/quay-ci:${CI_COMMIT_REF_SLUG}-${CI_COMMIT_SHA} - services: - - docker:dind stage: docker_release tags: - - docker + - kubernetes variables: - DOCKER_DRIVER: aufs + DOCKER_DRIVER: overlay + DOCKER_HOST: tcp://docker-host.gitlab-runner.svc.cluster.local:2375 karma-tests: before_script: - cd / diff --git a/.gitlab-ci/base_jobs.libsonnet b/.gitlab-ci/base_jobs.libsonnet index d45227087..85eb5dcf0 100644 --- a/.gitlab-ci/base_jobs.libsonnet +++ b/.gitlab-ci/base_jobs.libsonnet @@ -3,17 +3,17 @@ function(vars={}) dockerBuild: { // base job to manage containers (build / push) variables: { - DOCKER_DRIVER: "aufs", + DOCKER_DRIVER: "overlay", + DOCKER_HOST: "tcp://docker-host.gitlab-runner.svc.cluster.local:2375" }, + image: "docker:git", before_script: [ "docker login -u $DOCKER_USER -p $DOCKER_PASS quay.io", ], - services: [ - "docker:dind", - ], + tags: [ - "docker", + "kubernetes", ], }, From 46087d5e6422cf59b122e234409cc303949c49d9 Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Wed, 21 Jun 2017 15:27:56 -0400 Subject: [PATCH 06/22] util.secscan.api: more robust API failures cases Addresses QUAY-672 by handling all status codes that are not 404 and 5xx and moving response decoding inside the try/except block to ensure that the response object is in scope. 
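A minimal sketch of the resulting control flow is included here for review context only; it reuses the Non200ResponseException and APIRequestFailure names that appear in the diff below, and illustrates the pattern (decode inside the try block, treat 404 as "no data", surface 5xx and any other non-200 as APIRequestFailure) rather than the exact patch:

    try:
      response = self._call('GET', _API_METHOD_GET_LAYER % layer_id, params=params)
      try:
        # Decode inside the try block so `response` is always in scope.
        return response.json()
      except ValueError:
        logger.exception('Failed to decode response JSON')
        return None
    except Non200ResponseException as ex:
      if ex.response.status_code == 404:
        # A missing layer is reported as "no data", not as a failure.
        return None
      logger.error('downstream security service failure: status %d, text: %s',
                   ex.response.status_code, ex.response.text)
      if ex.response.status_code // 100 == 5:
        raise APIRequestFailure('Downstream service returned 5xx')
      # Any other non-200 status is now also surfaced as an API failure.
      raise APIRequestFailure('Downstream service returned non-200')
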
--- util/secscan/api.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/util/secscan/api.py b/util/secscan/api.py index 48b5d84f2..918c3fd23 100644 --- a/util/secscan/api.py +++ b/util/secscan/api.py @@ -387,18 +387,27 @@ class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface): response = self._call('GET', _API_METHOD_GET_LAYER % layer_id, params=params) logger.debug('Got response %s for vulnerabilities for layer %s', response.status_code, layer_id) + try: + return response.json() + except ValueError: + logger.exception('Failed to decode response JSON') + return None + except Non200ResponseException as ex: logger.debug('Got failed response %s for vulnerabilities for layer %s', ex.response.status_code, layer_id) if ex.response.status_code == 404: return None - elif ex.response.status_code // 100 == 5: + else: logger.error( 'downstream security service failure: status %d, text: %s', ex.response.status_code, ex.response.text, ) - raise APIRequestFailure('Downstream service returned 5xx') + if ex.response.status_code // 100 == 5: + raise APIRequestFailure('Downstream service returned 5xx') + else: + raise APIRequestFailure('Downstream service returned non-200') except requests.exceptions.Timeout: raise APIRequestFailure('API call timed out') except requests.exceptions.ConnectionError: @@ -407,11 +416,6 @@ class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface): logger.exception('Failed to get layer data response for %s', layer_id) raise APIRequestFailure() - try: - return response.json() - except ValueError: - logger.exception('Failed to decode response JSON') - def _request(self, method, endpoint, path, body, params, timeout): """ Issues an HTTP request to the security endpoint. """ From 0e26a03f7ef1f04c459fe41737159761cd17f07e Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Mon, 26 Jun 2017 18:10:39 -0400 Subject: [PATCH 07/22] endpoints.v2: new fs layout for data interface Fixes QUAY-658 --- endpoints/v2/__init__.py | 2 +- endpoints/v2/blob.py | 4 +- endpoints/v2/catalog.py | 2 +- endpoints/v2/manifest.py | 9 +- endpoints/v2/models_interface.py | 255 +++++++++++++++++ .../v2.py => endpoints/v2/models_pre_oci.py | 267 +----------------- endpoints/v2/tag.py | 4 +- endpoints/v2/v2auth.py | 6 +- 8 files changed, 280 insertions(+), 269 deletions(-) create mode 100644 endpoints/v2/models_interface.py rename data/interfaces/v2.py => endpoints/v2/models_pre_oci.py (59%) diff --git a/endpoints/v2/__init__.py b/endpoints/v2/__init__.py index 1b63c6f52..a2b95e972 100644 --- a/endpoints/v2/__init__.py +++ b/endpoints/v2/__init__.py @@ -15,9 +15,9 @@ from auth.auth_context import get_grant_context from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission) from auth.registry_jwt_auth import process_registry_jwt_auth, get_auth_headers -from data.interfaces.v2 import pre_oci_model as model from endpoints.decorators import anon_protect, anon_allowed from endpoints.v2.errors import V2RegistryException, Unauthorized, Unsupported, NameUnknown +from endpoints.v2.models_pre_oci import data_model as model from util.http import abort from util.metrics.metricqueue import time_blueprint from util.registry.dockerver import docker_version diff --git a/endpoints/v2/blob.py b/endpoints/v2/blob.py index cd77ee2ee..d8b361318 100644 --- a/endpoints/v2/blob.py +++ b/endpoints/v2/blob.py @@ -10,13 +10,13 @@ import resumablehashlib from app import storage, app, get_app_url, metric_queue from 
auth.registry_jwt_auth import process_registry_jwt_auth from data import database -from data.interfaces.v2 import pre_oci_model as model from digest import digest_tools from endpoints.common import parse_repository_name +from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge) -from endpoints.decorators import anon_protect +from endpoints.v2.models_pre_oci import data_model as model from util.cache import cache_control from util.registry.filelike import wrap_with_handler, StreamSlice from util.registry.gzipstream import calculate_size_handler diff --git a/endpoints/v2/catalog.py b/endpoints/v2/catalog.py index 18c27db82..01720bce4 100644 --- a/endpoints/v2/catalog.py +++ b/endpoints/v2/catalog.py @@ -5,7 +5,7 @@ from flask import jsonify from auth.registry_jwt_auth import process_registry_jwt_auth, get_granted_entity from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp, paginate -from data.interfaces.v2 import pre_oci_model as model +from endpoints.v2.models_pre_oci import data_model as model @v2_bp.route('/_catalog', methods=['GET']) @process_registry_jwt_auth() diff --git a/endpoints/v2/manifest.py b/endpoints/v2/manifest.py index 732403598..09c70e169 100644 --- a/endpoints/v2/manifest.py +++ b/endpoints/v2/manifest.py @@ -8,14 +8,15 @@ import features from app import docker_v2_signing_key, app, metric_queue from auth.registry_jwt_auth import process_registry_jwt_auth -from data.interfaces.v2 import pre_oci_model as model, Label from digest import digest_tools from endpoints.common import parse_repository_name from endpoints.decorators import anon_protect -from endpoints.v2 import v2_bp, require_repo_read, require_repo_write -from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid, - NameInvalid) from endpoints.notificationhelper import spawn_notification +from endpoints.v2 import v2_bp, require_repo_read, require_repo_write +from endpoints.v2.errors import ( + BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid, NameInvalid) +from endpoints.v2.models_interface import Label +from endpoints.v2.models_pre_oci import data_model as model from image.docker import ManifestException from image.docker.schema1 import DockerSchema1Manifest, DockerSchema1ManifestBuilder from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES diff --git a/endpoints/v2/models_interface.py b/endpoints/v2/models_interface.py new file mode 100644 index 000000000..1904871f3 --- /dev/null +++ b/endpoints/v2/models_interface.py @@ -0,0 +1,255 @@ +from abc import ABCMeta, abstractmethod +from collections import namedtuple + +from namedlist import namedlist +from six import add_metaclass + + +class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', + 'is_public', 'kind', 'trust_enabled'])): + """ + Repository represents a namespaced collection of tags. + :type id: int + :type name: string + :type namespace_name: string + :type description: string + :type is_public: bool + :type kind: string + :type trust_enabled: bool + """ + +class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])): + """ + ManifestJSON represents a Manifest of any format. + """ + + +class Tag(namedtuple('Tag', ['name', 'repository'])): + """ + Tag represents a user-facing alias for referencing a set of Manifests. 
+ """ + + +class BlobUpload(namedlist('BlobUpload', ['uuid', 'byte_count', 'uncompressed_byte_count', + 'chunk_count', 'sha_state', 'location_name', + 'storage_metadata', 'piece_sha_state', 'piece_hashes', + 'repo_namespace_name', 'repo_name'])): + """ + BlobUpload represents the current state of an Blob being uploaded. + """ + + +class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])): + """ + Blob represents an opaque binary blob saved to the storage system. + """ + + +class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])): + """ + RepositoryReference represents a reference to a Repository, without its full metadata. + """ + +class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])): + """ + Label represents a key-value pair that describes a particular Manifest. + """ + + +@add_metaclass(ABCMeta) +class DockerRegistryV2DataInterface(object): + """ + Interface that represents all data store interactions required by a Docker Registry v1. + """ + + @abstractmethod + def create_repository(self, namespace_name, repo_name, creating_user=None): + """ + Creates a new repository under the specified namespace with the given name. The user supplied is + the user creating the repository, if any. + """ + pass + + @abstractmethod + def get_repository(self, namespace_name, repo_name): + """ + Returns a repository tuple for the repository with the given name under the given namespace. + Returns None if no such repository was found. + """ + pass + + @abstractmethod + def has_active_tag(self, namespace_name, repo_name, tag_name): + """ + Returns whether there is an active tag for the tag with the given name under the matching + repository, if any, or none if none. + """ + pass + + @abstractmethod + def get_manifest_by_tag(self, namespace_name, repo_name, tag_name): + """ + Returns the current manifest for the tag with the given name under the matching repository, if + any, or None if none. + """ + pass + + @abstractmethod + def get_manifest_by_digest(self, namespace_name, repo_name, digest): + """ + Returns the manifest matching the given digest under the matching repository, if any, or None if + none. + """ + pass + + @abstractmethod + def delete_manifest_by_digest(self, namespace_name, repo_name, digest): + """ + Deletes the manifest with the associated digest (if any) and returns all removed tags that + pointed to that manifest. If the manifest was not found, returns an empty list. + """ + pass + + @abstractmethod + def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name): + """ + Returns the Docker V1 metadata associated with the tag with the given name under the matching + repository, if any. If none, returns None. + """ + pass + + @abstractmethod + def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids): + """ + Returns a map of Docker V1 metadata for each given image ID, matched under the repository with + the given namespace and name. Returns an empty map if the matching repository was not found. + """ + pass + + @abstractmethod + def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id): + """ + Returns an ordered list containing the Docker V1 metadata for each parent of the image with the + given docker ID under the matching repository. Returns an empty list if the image was not found. 
+ """ + pass + + @abstractmethod + def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest, + manifest_bytes): + """ + Creates a new manifest with the given digest and byte data, and assigns the tag with the given + name under the matching repository to it. + """ + pass + + @abstractmethod + def synthesize_v1_image(self, repository, storage, image_id, created, comment, command, + compat_json, parent_image_id): + """ + Synthesizes a V1 image under the specified repository, pointing to the given storage and returns + the V1 metadata for the synthesized image. + """ + pass + + @abstractmethod + def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id, + manifest_digest, manifest_bytes): + """ + Saves a manifest pointing to the given leaf image, with the given manifest, under the matching + repository as a tag with the given name. + + Returns a boolean whether or not the tag was newly created or not. + """ + pass + + @abstractmethod + def repository_tags(self, namespace_name, repo_name, limit, offset): + """ + Returns the active tags under the repository with the given name and namespace. + """ + pass + + @abstractmethod + def get_visible_repositories(self, username, limit, offset): + """ + Returns the repositories visible to the user with the given username, if any. + """ + pass + + @abstractmethod + def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata): + """ + Creates a blob upload under the matching repository with the given UUID and metadata. + Returns whether the matching repository exists. + """ + pass + + @abstractmethod + def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid): + """ + Searches for a blob upload with the given UUID under the given repository and returns it or None + if none. + """ + pass + + @abstractmethod + def update_blob_upload(self, blob_upload): + """ + Saves any changes to the blob upload object given to the backing data store. + Fields that can change: + - uncompressed_byte_count + - piece_hashes + - piece_sha_state + - storage_metadata + - byte_count + - chunk_count + - sha_state + """ + pass + + @abstractmethod + def delete_blob_upload(self, namespace_name, repo_name, uuid): + """ + Deletes the blob upload with the given uuid under the matching repository. If none, does + nothing. + """ + pass + + @abstractmethod + def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload, + expiration_sec): + """ + Creates a blob and links a temporary tag with the specified expiration to it under the matching + repository. + """ + pass + + @abstractmethod + def get_blob_by_digest(self, namespace_name, repo_name, digest): + """ + Returns the blob with the given digest under the matching repository or None if none. + """ + pass + + @abstractmethod + def save_bittorrent_pieces(self, blob, piece_size, piece_bytes): + """ + Saves the BitTorrent piece hashes for the given blob. + """ + pass + + @abstractmethod + def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels): + """ + Creates a new labels for the provided manifest. + """ + pass + + + @abstractmethod + def get_blob_path(self, blob): + """ + Once everything is moved over, this could be in util.registry and not even touch the database. 
+ """ + pass diff --git a/data/interfaces/v2.py b/endpoints/v2/models_pre_oci.py similarity index 59% rename from data/interfaces/v2.py rename to endpoints/v2/models_pre_oci.py index 56f778bb8..3a963b3fb 100644 --- a/data/interfaces/v2.py +++ b/endpoints/v2/models_pre_oci.py @@ -1,267 +1,22 @@ -from abc import ABCMeta, abstractmethod -from collections import namedtuple - -from namedlist import namedlist from peewee import IntegrityError -from six import add_metaclass from data import model, database from data.model import DataModelException +from endpoints.v2.models_interface import ( + Blob, + BlobUpload, + DockerRegistryV2DataInterface, + ManifestJSON, + Repository, + RepositoryReference, + Tag, +) from image.docker.v1 import DockerV1Metadata + _MEDIA_TYPE = "application/vnd.docker.distribution.manifest.v1+prettyjws" -class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', - 'is_public', 'kind', 'trust_enabled'])): - """ - Repository represents a namespaced collection of tags. - :type id: int - :type name: string - :type namespace_name: string - :type description: string - :type is_public: bool - :type kind: string - :type trust_enabled: bool - """ - -class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])): - """ - ManifestJSON represents a Manifest of any format. - """ - - -class Tag(namedtuple('Tag', ['name', 'repository'])): - """ - Tag represents a user-facing alias for referencing a set of Manifests. - """ - - -class BlobUpload(namedlist('BlobUpload', ['uuid', 'byte_count', 'uncompressed_byte_count', - 'chunk_count', 'sha_state', 'location_name', - 'storage_metadata', 'piece_sha_state', 'piece_hashes', - 'repo_namespace_name', 'repo_name'])): - """ - BlobUpload represents the current state of an Blob being uploaded. - """ - - -class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])): - """ - Blob represents an opaque binary blob saved to the storage system. - """ - - -class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])): - """ - RepositoryReference represents a reference to a Repository, without its full metadata. - """ - -class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])): - """ - Label represents a key-value pair that describes a particular Manifest. - """ - - -@add_metaclass(ABCMeta) -class DockerRegistryV2DataInterface(object): - """ - Interface that represents all data store interactions required by a Docker Registry v1. - """ - - @abstractmethod - def create_repository(self, namespace_name, repo_name, creating_user=None): - """ - Creates a new repository under the specified namespace with the given name. The user supplied is - the user creating the repository, if any. - """ - pass - - @abstractmethod - def get_repository(self, namespace_name, repo_name): - """ - Returns a repository tuple for the repository with the given name under the given namespace. - Returns None if no such repository was found. - """ - pass - - @abstractmethod - def has_active_tag(self, namespace_name, repo_name, tag_name): - """ - Returns whether there is an active tag for the tag with the given name under the matching - repository, if any, or none if none. - """ - pass - - @abstractmethod - def get_manifest_by_tag(self, namespace_name, repo_name, tag_name): - """ - Returns the current manifest for the tag with the given name under the matching repository, if - any, or None if none. 
- """ - pass - - @abstractmethod - def get_manifest_by_digest(self, namespace_name, repo_name, digest): - """ - Returns the manifest matching the given digest under the matching repository, if any, or None if - none. - """ - pass - - @abstractmethod - def delete_manifest_by_digest(self, namespace_name, repo_name, digest): - """ - Deletes the manifest with the associated digest (if any) and returns all removed tags that - pointed to that manifest. If the manifest was not found, returns an empty list. - """ - pass - - @abstractmethod - def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name): - """ - Returns the Docker V1 metadata associated with the tag with the given name under the matching - repository, if any. If none, returns None. - """ - pass - - @abstractmethod - def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids): - """ - Returns a map of Docker V1 metadata for each given image ID, matched under the repository with - the given namespace and name. Returns an empty map if the matching repository was not found. - """ - pass - - @abstractmethod - def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id): - """ - Returns an ordered list containing the Docker V1 metadata for each parent of the image with the - given docker ID under the matching repository. Returns an empty list if the image was not found. - """ - pass - - @abstractmethod - def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest, - manifest_bytes): - """ - Creates a new manifest with the given digest and byte data, and assigns the tag with the given - name under the matching repository to it. - """ - pass - - @abstractmethod - def synthesize_v1_image(self, repository, storage, image_id, created, comment, command, - compat_json, parent_image_id): - """ - Synthesizes a V1 image under the specified repository, pointing to the given storage and returns - the V1 metadata for the synthesized image. - """ - pass - - @abstractmethod - def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id, - manifest_digest, manifest_bytes): - """ - Saves a manifest pointing to the given leaf image, with the given manifest, under the matching - repository as a tag with the given name. - - Returns a boolean whether or not the tag was newly created or not. - """ - pass - - @abstractmethod - def repository_tags(self, namespace_name, repo_name, limit, offset): - """ - Returns the active tags under the repository with the given name and namespace. - """ - pass - - @abstractmethod - def get_visible_repositories(self, username, limit, offset): - """ - Returns the repositories visible to the user with the given username, if any. - """ - pass - - @abstractmethod - def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata): - """ - Creates a blob upload under the matching repository with the given UUID and metadata. - Returns whether the matching repository exists. - """ - pass - - @abstractmethod - def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid): - """ - Searches for a blob upload with the given UUID under the given repository and returns it or None - if none. - """ - pass - - @abstractmethod - def update_blob_upload(self, blob_upload): - """ - Saves any changes to the blob upload object given to the backing data store. 
- Fields that can change: - - uncompressed_byte_count - - piece_hashes - - piece_sha_state - - storage_metadata - - byte_count - - chunk_count - - sha_state - """ - pass - - @abstractmethod - def delete_blob_upload(self, namespace_name, repo_name, uuid): - """ - Deletes the blob upload with the given uuid under the matching repository. If none, does - nothing. - """ - pass - - @abstractmethod - def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload, - expiration_sec): - """ - Creates a blob and links a temporary tag with the specified expiration to it under the matching - repository. - """ - pass - - @abstractmethod - def get_blob_by_digest(self, namespace_name, repo_name, digest): - """ - Returns the blob with the given digest under the matching repository or None if none. - """ - pass - - @abstractmethod - def save_bittorrent_pieces(self, blob, piece_size, piece_bytes): - """ - Saves the BitTorrent piece hashes for the given blob. - """ - pass - - @abstractmethod - def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels): - """ - Creates a new labels for the provided manifest. - """ - pass - - - @abstractmethod - def get_blob_path(self, blob): - """ - Once everything is moved over, this could be in util.registry and not even touch the database. - """ - pass - - class PreOCIModel(DockerRegistryV2DataInterface): """ PreOCIModel implements the data model for the v2 Docker Registry protocol using a database schema @@ -544,4 +299,4 @@ def _repository_for_repo(repo): ) -pre_oci_model = PreOCIModel() +data_model = PreOCIModel() diff --git a/endpoints/v2/tag.py b/endpoints/v2/tag.py index 683480ac2..9c0e81b02 100644 --- a/endpoints/v2/tag.py +++ b/endpoints/v2/tag.py @@ -2,9 +2,9 @@ from flask import jsonify from auth.registry_jwt_auth import process_registry_jwt_auth from endpoints.common import parse_repository_name -from endpoints.v2 import v2_bp, require_repo_read, paginate from endpoints.decorators import anon_protect -from data.interfaces.v2 import pre_oci_model as model +from endpoints.v2 import v2_bp, require_repo_read, paginate +from endpoints.v2.models_pre_oci import data_model as model @v2_bp.route('//tags/list', methods=['GET']) @parse_repository_name() diff --git a/endpoints/v2/v2auth.py b/endpoints/v2/v2auth.py index 0d9e8ffb0..747404b88 100644 --- a/endpoints/v2/v2auth.py +++ b/endpoints/v2/v2auth.py @@ -13,10 +13,10 @@ from auth.permissions import (ModifyRepositoryPermission, ReadRepositoryPermissi from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp from endpoints.v2.errors import InvalidLogin, NameInvalid, InvalidRequest, Unsupported, Unauthorized -from data.interfaces.v2 import pre_oci_model as model +from endpoints.v2.models_pre_oci import data_model as model from util.cache import no_cache from util.names import parse_namespace_repository, REPOSITORY_NAME_REGEX -from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, QUAY_TUF_ROOT, +from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT) logger = logging.getLogger(__name__) @@ -188,7 +188,7 @@ def generate_registry_jwt(auth_result): def get_tuf_root(repo, namespace, reponame): if not features.SIGNING or repo is None or not repo.trust_enabled: return DISABLED_TUF_ROOT - + # Users with write access to a repo will see signer-rooted TUF metadata if ModifyRepositoryPermission(namespace, reponame).can(): return SIGNER_TUF_ROOT From 
b1434b0380aff57c3b103c9c406dc4e287c99937 Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Mon, 26 Jun 2017 18:16:15 -0400 Subject: [PATCH 08/22] endpoints.v2: yapf format --- endpoints/v2/__init__.py | 37 ++++++------ endpoints/v2/blob.py | 84 ++++++++++++---------------- endpoints/v2/catalog.py | 6 +- endpoints/v2/errors.py | 96 +++++++++++--------------------- endpoints/v2/manifest.py | 33 ++++++----- endpoints/v2/models_interface.py | 19 ++++--- endpoints/v2/models_pre_oci.py | 67 +++++++++------------- endpoints/v2/tag.py | 4 +- endpoints/v2/v2auth.py | 22 ++++---- 9 files changed, 152 insertions(+), 216 deletions(-) diff --git a/endpoints/v2/__init__.py b/endpoints/v2/__init__.py index a2b95e972..db490de02 100644 --- a/endpoints/v2/__init__.py +++ b/endpoints/v2/__init__.py @@ -12,8 +12,8 @@ import features from app import app, metric_queue, get_app_url, license_validator from auth.auth_context import get_grant_context -from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission, - AdministerRepositoryPermission) +from auth.permissions import ( + ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission) from auth.registry_jwt_auth import process_registry_jwt_auth, get_auth_headers from endpoints.decorators import anon_protect, anon_allowed from endpoints.v2.errors import V2RegistryException, Unauthorized, Unsupported, NameUnknown @@ -23,10 +23,8 @@ from util.metrics.metricqueue import time_blueprint from util.registry.dockerver import docker_version from util.pagination import encrypt_page_token, decrypt_page_token - logger = logging.getLogger(__name__) - v2_bp = Blueprint('v2', __name__) license_validator.enforce_license_before_request(v2_bp) time_blueprint(v2_bp, metric_queue) @@ -34,9 +32,7 @@ time_blueprint(v2_bp, metric_queue) @v2_bp.app_errorhandler(V2RegistryException) def handle_registry_v2_exception(error): - response = jsonify({ - 'errors': [error.as_dict()] - }) + response = jsonify({'errors': [error.as_dict()]}) response.status_code = error.http_status_code if response.status_code == 401: @@ -53,6 +49,7 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset', """ Decorates a handler adding a parsed pagination token and a callback to encode a response token. """ + def wrapper(func): @wraps(func) def wrapped(*args, **kwargs): @@ -86,7 +83,9 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset', kwargs[offset_kwarg_name] = offset kwargs[callback_kwarg_name] = callback return func(*args, **kwargs) + return wrapped + return wrapper @@ -94,17 +93,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False): def wrapper(func): @wraps(func) def wrapped(namespace_name, repo_name, *args, **kwargs): - logger.debug('Checking permission %s for repo: %s/%s', permission_class, - namespace_name, repo_name) + logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace_name, + repo_name) repository = namespace_name + '/' + repo_name repo = model.get_repository(namespace_name, repo_name) if repo is None: raise Unauthorized(repository=repository, scopes=scopes) permission = permission_class(namespace_name, repo_name) - if (permission.can() or - (allow_public and - repo.is_public)): + if (permission.can() or (allow_public and repo.is_public)): if repo.kind != 'image': msg = 'This repository is for managing %s resources and not container images.' 
% repo.kind raise Unsupported(detail=msg) @@ -112,16 +109,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False): raise Unauthorized(repository=repository, scopes=scopes) return wrapped + return wrapper -require_repo_read = _require_repo_permission(ReadRepositoryPermission, - scopes=['pull'], +require_repo_read = _require_repo_permission(ReadRepositoryPermission, scopes=['pull'], allow_public=True) -require_repo_write = _require_repo_permission(ModifyRepositoryPermission, - scopes=['pull', 'push']) -require_repo_admin = _require_repo_permission(AdministerRepositoryPermission, - scopes=['pull', 'push']) +require_repo_write = _require_repo_permission(ModifyRepositoryPermission, scopes=['pull', 'push']) +require_repo_admin = _require_repo_permission(AdministerRepositoryPermission, scopes=[ + 'pull', 'push']) def get_input_stream(flask_request): @@ -138,7 +134,9 @@ def route_show_if(value): abort(404) return f(*args, **kwargs) + return decorated_function + return decorator @@ -169,5 +167,4 @@ from endpoints.v2 import ( catalog, manifest, tag, - v2auth, -) + v2auth,) diff --git a/endpoints/v2/blob.py b/endpoints/v2/blob.py index d8b361318..f736eaccc 100644 --- a/endpoints/v2/blob.py +++ b/endpoints/v2/blob.py @@ -14,18 +14,16 @@ from digest import digest_tools from endpoints.common import parse_repository_name from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream -from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, - NameUnknown, LayerTooLarge) +from endpoints.v2.errors import ( + BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge) from endpoints.v2.models_pre_oci import data_model as model from util.cache import cache_control from util.registry.filelike import wrap_with_handler, StreamSlice from util.registry.gzipstream import calculate_size_handler from util.registry.torrent import PieceHasher - logger = logging.getLogger(__name__) - BASE_BLOB_ROUTE = '//blobs/' BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN) RANGE_HEADER_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$') @@ -52,8 +50,7 @@ def check_blob_exists(namespace_name, repo_name, digest): headers = { 'Docker-Content-Digest': digest, 'Content-Length': blob.size, - 'Content-Type': BLOB_CONTENT_TYPE, - } + 'Content-Type': BLOB_CONTENT_TYPE,} # If our storage supports range requests, let the client know. if storage.get_supports_resumable_downloads(blob.locations): @@ -102,10 +99,7 @@ def download_blob(namespace_name, repo_name, digest): storage.stream_read(blob.locations, path), headers=headers.update({ 'Content-Length': blob.size, - 'Content-Type': BLOB_CONTENT_TYPE, - }), - ) - + 'Content-Type': BLOB_CONTENT_TYPE,}),) @v2_bp.route('//blobs/uploads/', methods=['POST']) @@ -128,13 +122,13 @@ def start_blob_upload(namespace_name, repo_name): return Response( status=202, headers={ - 'Docker-Upload-UUID': new_upload_uuid, - 'Range': _render_range(0), - 'Location': get_app_url() + url_for('v2.upload_chunk', - repository='%s/%s' % (namespace_name, repo_name), - upload_uuid=new_upload_uuid) - }, - ) + 'Docker-Upload-UUID': + new_upload_uuid, + 'Range': + _render_range(0), + 'Location': + get_app_url() + url_for('v2.upload_chunk', repository='%s/%s' % + (namespace_name, repo_name), upload_uuid=new_upload_uuid)},) # The user plans to send us the entire body right now. # Find the upload. 
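For orientation, the routes being reformatted here implement the standard Docker Registry v2 blob-upload exchange: a POST opens an upload and returns Location and Docker-Upload-UUID headers, and a PUT naming the digest completes it with a 201. A hedged client-side sketch of the monolithic path (hostname, repository and blob bytes are invented, and token auth is omitted):

import hashlib
import requests  # illustrative client only; real pushes go through the docker client

blob = b'example layer bytes'                          # invented content
digest = 'sha256:' + hashlib.sha256(blob).hexdigest()
base = 'https://quay.example.com/v2/devtable/simple'   # invented hostname/repository

# Start an upload; the 202 response carries Location and Docker-Upload-UUID.
start = requests.post(base + '/blobs/uploads/')
location = start.headers['Location']

# Finish monolithically: PUT the bytes and name the digest; a 201 response echoes
# the digest back in Docker-Content-Digest.
done = requests.put(location, params={'digest': digest}, data=blob,
                    headers={'Content-Type': 'application/octet-stream'})
assert done.status_code == 201
assert done.headers['Docker-Content-Digest'] == digest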
@@ -158,12 +152,11 @@ def start_blob_upload(namespace_name, repo_name): return Response( status=201, headers={ - 'Docker-Content-Digest': digest, - 'Location': get_app_url() + url_for('v2.download_blob', - repository='%s/%s' % (namespace_name, repo_name), - digest=digest), - }, - ) + 'Docker-Content-Digest': + digest, + 'Location': + get_app_url() + url_for('v2.download_blob', repository='%s/%s' % + (namespace_name, repo_name), digest=digest),},) @v2_bp.route('//blobs/uploads/', methods=['GET']) @@ -180,9 +173,8 @@ def fetch_existing_upload(namespace_name, repo_name, upload_uuid): status=204, headers={ 'Docker-Upload-UUID': upload_uuid, - 'Range': _render_range(blob_upload.byte_count+1), # byte ranges are exclusive - }, - ) + 'Range': _render_range(blob_upload.byte_count + 1), # byte ranges are exclusive + },) @v2_bp.route('//blobs/uploads/', methods=['PATCH']) @@ -211,9 +203,7 @@ def upload_chunk(namespace_name, repo_name, upload_uuid): headers={ 'Location': _current_request_url(), 'Range': _render_range(updated_blob_upload.byte_count, with_bytes_prefix=False), - 'Docker-Upload-UUID': upload_uuid, - }, - ) + 'Docker-Upload-UUID': upload_uuid,},) @v2_bp.route('//blobs/uploads/', methods=['PUT']) @@ -242,15 +232,12 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid): _finish_upload(namespace_name, repo_name, updated_blob_upload, digest) # Write the response to the client. - return Response( - status=201, - headers={ - 'Docker-Content-Digest': digest, - 'Location': get_app_url() + url_for('v2.download_blob', - repository='%s/%s' % (namespace_name, repo_name), - digest=digest), - } - ) + return Response(status=201, headers={ + 'Docker-Content-Digest': + digest, + 'Location': + get_app_url() + url_for('v2.download_blob', repository='%s/%s' % + (namespace_name, repo_name), digest=digest),}) @v2_bp.route('//blobs/uploads/', methods=['DELETE']) @@ -300,9 +287,11 @@ def _abort_range_not_satisfiable(valid_end, upload_uuid): TODO(jzelinskie): Unify this with the V2RegistryException class. """ - flask_abort(Response(status=416, headers={'Location': _current_request_url(), - 'Range': '0-{0}'.format(valid_end), - 'Docker-Upload-UUID': upload_uuid})) + flask_abort( + Response(status=416, headers={ + 'Location': _current_request_url(), + 'Range': '0-{0}'.format(valid_end), + 'Docker-Upload-UUID': upload_uuid})) def _parse_range_header(range_header_text): @@ -415,16 +404,15 @@ def _upload_chunk(blob_upload, range_header): length, input_fp, blob_upload.storage_metadata, - content_type=BLOB_CONTENT_TYPE, - ) + content_type=BLOB_CONTENT_TYPE,) if upload_error is not None: logger.error('storage.stream_upload_chunk returned error %s', upload_error) return None # Update the chunk upload time metric. - metric_queue.chunk_upload_time.Observe(time.time() - start_time, - labelvalues=[length_written, list(location_set)[0]]) + metric_queue.chunk_upload_time.Observe(time.time() - start_time, labelvalues=[ + length_written, list(location_set)[0]]) # If we determined an uncompressed size and this is the first chunk, add it to the blob. # Otherwise, we clear the size from the blob as it was uploaded in multiple chunks. @@ -499,8 +487,7 @@ def _finalize_blob_database(namespace_name, repo_name, blob_upload, digest, alre repo_name, digest, blob_upload, - app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'], - ) + app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],) # If it doesn't already exist, create the BitTorrent pieces for the blob. 
if blob_upload.piece_sha_state is not None and not already_existed: @@ -521,5 +508,4 @@ def _finish_upload(namespace_name, repo_name, blob_upload, digest): repo_name, blob_upload, digest, - _finalize_blob_storage(blob_upload, digest), - ) + _finalize_blob_storage(blob_upload, digest),) diff --git a/endpoints/v2/catalog.py b/endpoints/v2/catalog.py index 01720bce4..096dd1d15 100644 --- a/endpoints/v2/catalog.py +++ b/endpoints/v2/catalog.py @@ -7,6 +7,7 @@ from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp, paginate from endpoints.v2.models_pre_oci import data_model as model + @v2_bp.route('/_catalog', methods=['GET']) @process_registry_jwt_auth() @anon_protect @@ -18,12 +19,11 @@ def catalog_search(limit, offset, pagination_callback): username = entity.user.username include_public = bool(features.PUBLIC_CATALOG) - visible_repositories = model.get_visible_repositories(username, limit+1, offset, + visible_repositories = model.get_visible_repositories(username, limit + 1, offset, include_public=include_public) response = jsonify({ 'repositories': ['%s/%s' % (repo.namespace_name, repo.name) - for repo in visible_repositories][0:limit], - }) + for repo in visible_repositories][0:limit],}) pagination_callback(len(visible_repositories), response) return response diff --git a/endpoints/v2/errors.py b/endpoints/v2/errors.py index 127424ca1..0ae998106 100644 --- a/endpoints/v2/errors.py +++ b/endpoints/v2/errors.py @@ -1,8 +1,9 @@ import bitmath + class V2RegistryException(Exception): - def __init__(self, error_code_str, message, detail, http_status_code=400, - repository=None, scopes=None): + def __init__(self, error_code_str, message, detail, http_status_code=400, repository=None, + scopes=None): super(V2RegistryException, self).__init__(message) self.http_status_code = http_status_code self.repository = repository @@ -15,104 +16,81 @@ class V2RegistryException(Exception): return { 'code': self._error_code_str, 'message': self.message, - 'detail': self._detail if self._detail is not None else {}, - } + 'detail': self._detail if self._detail is not None else {},} class BlobUnknown(V2RegistryException): def __init__(self, detail=None): - super(BlobUnknown, self).__init__('BLOB_UNKNOWN', - 'blob unknown to registry', - detail, - 404) + super(BlobUnknown, self).__init__('BLOB_UNKNOWN', 'blob unknown to registry', detail, 404) class BlobUploadInvalid(V2RegistryException): def __init__(self, detail=None): - super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID', - 'blob upload invalid', - detail) + super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID', 'blob upload invalid', detail) class BlobUploadUnknown(V2RegistryException): def __init__(self, detail=None): super(BlobUploadUnknown, self).__init__('BLOB_UPLOAD_UNKNOWN', - 'blob upload unknown to registry', - detail, - 404) + 'blob upload unknown to registry', detail, 404) class DigestInvalid(V2RegistryException): def __init__(self, detail=None): super(DigestInvalid, self).__init__('DIGEST_INVALID', - 'provided digest did not match uploaded content', - detail) + 'provided digest did not match uploaded content', detail) class ManifestBlobUnknown(V2RegistryException): def __init__(self, detail=None): super(ManifestBlobUnknown, self).__init__('MANIFEST_BLOB_UNKNOWN', - 'manifest blob unknown to registry', - detail) + 'manifest blob unknown to registry', detail) class ManifestInvalid(V2RegistryException): def __init__(self, detail=None, http_status_code=400): - super(ManifestInvalid, 
self).__init__('MANIFEST_INVALID', - 'manifest invalid', - detail, + super(ManifestInvalid, self).__init__('MANIFEST_INVALID', 'manifest invalid', detail, http_status_code) class ManifestUnknown(V2RegistryException): def __init__(self, detail=None): - super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN', - 'manifest unknown', - detail, - 404) + super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN', 'manifest unknown', detail, 404) class ManifestUnverified(V2RegistryException): def __init__(self, detail=None): super(ManifestUnverified, self).__init__('MANIFEST_UNVERIFIED', - 'manifest failed signature verification', - detail) + 'manifest failed signature verification', detail) class NameInvalid(V2RegistryException): def __init__(self, detail=None, message=None): - super(NameInvalid, self).__init__('NAME_INVALID', - message or 'invalid repository name', - detail) + super(NameInvalid, self).__init__('NAME_INVALID', message or 'invalid repository name', detail) class NameUnknown(V2RegistryException): def __init__(self, detail=None): - super(NameUnknown, self).__init__('NAME_UNKNOWN', - 'repository name not known to registry', - detail, - 404) + super(NameUnknown, self).__init__('NAME_UNKNOWN', 'repository name not known to registry', + detail, 404) class SizeInvalid(V2RegistryException): def __init__(self, detail=None): super(SizeInvalid, self).__init__('SIZE_INVALID', - 'provided length did not match content length', - detail) + 'provided length did not match content length', detail) class TagAlreadyExists(V2RegistryException): def __init__(self, detail=None): - super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS', - 'tag was already pushed', - detail, + super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS', 'tag was already pushed', detail, 409) + class TagInvalid(V2RegistryException): def __init__(self, detail=None): - super(TagInvalid, self).__init__('TAG_INVALID', - 'manifest tag did not match URI', - detail) + super(TagInvalid, self).__init__('TAG_INVALID', 'manifest tag did not match URI', detail) + class LayerTooLarge(V2RegistryException): def __init__(self, uploaded=None, max_allowed=None): @@ -123,43 +101,33 @@ class LayerTooLarge(V2RegistryException): detail = { 'reason': '%s is greater than maximum allowed size %s' % (uploaded, max_allowed), 'max_allowed': max_allowed, - 'uploaded': uploaded, - } + 'uploaded': uploaded,} up_str = bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}") max_str = bitmath.Byte(max_allowed).best_prefix().format("{value:.2f} {unit}") - message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str, max_str) + message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str, + max_str) + class Unauthorized(V2RegistryException): def __init__(self, detail=None, repository=None, scopes=None): - super(Unauthorized, self).__init__('UNAUTHORIZED', - 'access to the requested resource is not authorized', - detail, - 401, - repository=repository, - scopes=scopes) + super(Unauthorized, + self).__init__('UNAUTHORIZED', 'access to the requested resource is not authorized', + detail, 401, repository=repository, scopes=scopes) class Unsupported(V2RegistryException): def __init__(self, detail=None, message=None): - super(Unsupported, self).__init__('UNSUPPORTED', - message or 'The operation is unsupported.', - detail, - 405) + super(Unsupported, self).__init__('UNSUPPORTED', message or 'The operation is unsupported.', + detail, 405) class InvalidLogin(V2RegistryException): def __init__(self, 
message=None): - super(InvalidLogin, self).__init__('UNAUTHORIZED', - message or 'Specified credentials are invalid', - {}, - 401) - + super(InvalidLogin, self).__init__('UNAUTHORIZED', message or + 'Specified credentials are invalid', {}, 401) class InvalidRequest(V2RegistryException): def __init__(self, message=None): - super(InvalidRequest, self).__init__('INVALID_REQUEST', - message or 'Invalid request', - {}, - 400) \ No newline at end of file + super(InvalidRequest, self).__init__('INVALID_REQUEST', message or 'Invalid request', {}, 400) diff --git a/endpoints/v2/manifest.py b/endpoints/v2/manifest.py index 09c70e169..5d480472b 100644 --- a/endpoints/v2/manifest.py +++ b/endpoints/v2/manifest.py @@ -25,14 +25,13 @@ from util.names import VALID_TAG_PATTERN from util.registry.replication import queue_replication_batch from util.validation import is_json - logger = logging.getLogger(__name__) - BASE_MANIFEST_ROUTE = '//manifests/' MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN) MANIFEST_TAGNAME_ROUTE = BASE_MANIFEST_ROUTE.format(VALID_TAG_PATTERN) + @v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET']) @parse_repository_name() @process_registry_jwt_auth(scopes=['pull']) @@ -52,14 +51,14 @@ def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref): repo = model.get_repository(namespace_name, repo_name) if repo is not None: track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01, - tag=manifest_ref) + tag=manifest_ref) metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True]) return Response( manifest.json, status=200, - headers={'Content-Type': manifest.media_type, 'Docker-Content-Digest': manifest.digest}, - ) + headers={'Content-Type': manifest.media_type, + 'Docker-Content-Digest': manifest.digest},) @v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['GET']) @@ -78,8 +77,9 @@ def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref): track_and_log('pull_repo', repo, manifest_digest=manifest_ref) metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True]) - return Response(manifest.json, status=200, headers={'Content-Type': manifest.media_type, - 'Docker-Content-Digest': manifest.digest}) + return Response(manifest.json, status=200, headers={ + 'Content-Type': manifest.media_type, + 'Docker-Content-Digest': manifest.digest}) def _reject_manifest2_schema2(func): @@ -89,6 +89,7 @@ def _reject_manifest2_schema2(func): raise ManifestInvalid(detail={'message': 'manifest schema version not supported'}, http_status_code=415) return func(*args, **kwargs) + return wrapped @@ -131,8 +132,7 @@ def write_manifest_by_digest(namespace_name, repo_name, manifest_ref): def _write_manifest(namespace_name, repo_name, manifest): - if (manifest.namespace == '' and - features.LIBRARY_SUPPORT and + if (manifest.namespace == '' and features.LIBRARY_SUPPORT and namespace_name == app.config['LIBRARY_NAMESPACE']): pass elif manifest.namespace != namespace_name: @@ -174,8 +174,7 @@ def _write_manifest(namespace_name, repo_name, manifest): rewritten_image.comment, rewritten_image.command, rewritten_image.compat_json, - rewritten_image.parent_image_id, - ) + rewritten_image.parent_image_id,) except ManifestException as me: logger.exception("exception when rewriting v1 metadata") raise ManifestInvalid(detail={'message': 'failed synthesizing v1 metadata: %s' % me.message}) @@ -212,12 +211,11 @@ def _write_manifest_and_log(namespace_name, repo_name, manifest): 'OK', status=202, 
headers={ - 'Docker-Content-Digest': manifest.digest, - 'Location': url_for('v2.fetch_manifest_by_digest', - repository='%s/%s' % (namespace_name, repo_name), - manifest_ref=manifest.digest), - }, - ) + 'Docker-Content-Digest': + manifest.digest, + 'Location': + url_for('v2.fetch_manifest_by_digest', repository='%s/%s' % (namespace_name, repo_name), + manifest_ref=manifest.digest),},) @v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE']) @@ -271,5 +269,6 @@ def _generate_and_store_manifest(namespace_name, repo_name, tag_name): manifest.bytes) return manifest + def _determine_media_type(value): media_type_name = 'application/json' if is_json(value) else 'text/plain' diff --git a/endpoints/v2/models_interface.py b/endpoints/v2/models_interface.py index 1904871f3..bbfd51b2c 100644 --- a/endpoints/v2/models_interface.py +++ b/endpoints/v2/models_interface.py @@ -5,8 +5,9 @@ from namedlist import namedlist from six import add_metaclass -class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', - 'is_public', 'kind', 'trust_enabled'])): +class Repository( + namedtuple('Repository', [ + 'id', 'name', 'namespace_name', 'description', 'is_public', 'kind', 'trust_enabled'])): """ Repository represents a namespaced collection of tags. :type id: int @@ -18,6 +19,7 @@ class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'desc :type trust_enabled: bool """ + class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])): """ ManifestJSON represents a Manifest of any format. @@ -30,10 +32,10 @@ class Tag(namedtuple('Tag', ['name', 'repository'])): """ -class BlobUpload(namedlist('BlobUpload', ['uuid', 'byte_count', 'uncompressed_byte_count', - 'chunk_count', 'sha_state', 'location_name', - 'storage_metadata', 'piece_sha_state', 'piece_hashes', - 'repo_namespace_name', 'repo_name'])): +class BlobUpload( + namedlist('BlobUpload', [ + 'uuid', 'byte_count', 'uncompressed_byte_count', 'chunk_count', 'sha_state', 'location_name', + 'storage_metadata', 'piece_sha_state', 'piece_hashes', 'repo_namespace_name', 'repo_name'])): """ BlobUpload represents the current state of an Blob being uploaded. """ @@ -50,6 +52,7 @@ class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'name RepositoryReference represents a reference to a Repository, without its full metadata. """ + class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])): """ Label represents a key-value pair that describes a particular Manifest. @@ -178,7 +181,8 @@ class DockerRegistryV2DataInterface(object): pass @abstractmethod - def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata): + def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, + storage_metadata): """ Creates a blob upload under the matching repository with the given UUID and metadata. Returns whether the matching repository exists. 
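Worth noting amid the reflow: BlobUpload stays a namedlist (mutable) precisely because upload state is rewritten in place as chunks arrive and then flushed through update_blob_upload. A rough sketch of that pattern (field values invented):

from endpoints.v2.models_interface import BlobUpload
from endpoints.v2.models_pre_oci import data_model

# Invented starting state; in the endpoints this record comes from blob_upload_by_uuid().
upload = BlobUpload(uuid='upload-uuid', byte_count=0, uncompressed_byte_count=None,
                    chunk_count=0, sha_state=None, location_name='local_us',
                    storage_metadata={}, piece_sha_state=None, piece_hashes=None,
                    repo_namespace_name='devtable', repo_name='simple')

# After a chunk is written, the mutable fields are updated in place...
upload.byte_count += 1024
upload.chunk_count += 1

# ...and the changes are persisted via the data model.
data_model.update_blob_upload(upload)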
@@ -246,7 +250,6 @@ class DockerRegistryV2DataInterface(object): """ pass - @abstractmethod def get_blob_path(self, blob): """ diff --git a/endpoints/v2/models_pre_oci.py b/endpoints/v2/models_pre_oci.py index 3a963b3fb..a241c7259 100644 --- a/endpoints/v2/models_pre_oci.py +++ b/endpoints/v2/models_pre_oci.py @@ -9,11 +9,9 @@ from endpoints.v2.models_interface import ( ManifestJSON, Repository, RepositoryReference, - Tag, -) + Tag,) from image.docker.v1 import DockerV1Metadata - _MEDIA_TYPE = "application/vnd.docker.distribution.manifest.v1+prettyjws" @@ -22,6 +20,7 @@ class PreOCIModel(DockerRegistryV2DataInterface): PreOCIModel implements the data model for the v2 Docker Registry protocol using a database schema before it was changed to support the OCI specification. """ + def create_repository(self, namespace_name, repo_name, creating_user=None): return model.repository.create_repository(namespace_name, repo_name, creating_user) @@ -54,14 +53,10 @@ class PreOCIModel(DockerRegistryV2DataInterface): def delete_manifest_by_digest(self, namespace_name, repo_name, digest): def _tag_view(tag): - return Tag( - name=tag.name, - repository=RepositoryReference( - id=tag.repository_id, - name=repo_name, - namespace_name=namespace_name, - ) - ) + return Tag(name=tag.name, repository=RepositoryReference( + id=tag.repository_id, + name=repo_name, + namespace_name=namespace_name,)) tags = model.tag.delete_manifest_by_digest(namespace_name, repo_name, digest) return [_tag_view(tag) for tag in tags] @@ -79,8 +74,9 @@ class PreOCIModel(DockerRegistryV2DataInterface): return {} images_query = model.image.lookup_repository_images(repo, docker_image_ids) - return {image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image) - for image in images_query} + return { + image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image) + for image in images_query} def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id): repo_image = model.image.get_repo_image(namespace_name, repo_name, docker_image_id) @@ -122,21 +118,16 @@ class PreOCIModel(DockerRegistryV2DataInterface): def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id, manifest_digest, manifest_bytes): - (_, newly_created) = model.tag.store_tag_manifest(namespace_name, repo_name, tag_name, - leaf_layer_docker_id, manifest_digest, - manifest_bytes) + (_, newly_created) = model.tag.store_tag_manifest( + namespace_name, repo_name, tag_name, leaf_layer_docker_id, manifest_digest, manifest_bytes) return newly_created def repository_tags(self, namespace_name, repo_name, limit, offset): def _tag_view(tag): - return Tag( - name=tag.name, - repository=RepositoryReference( - id=tag.repository_id, - name=repo_name, - namespace_name=namespace_name, - ) - ) + return Tag(name=tag.name, repository=RepositoryReference( + id=tag.repository_id, + name=repo_name, + namespace_name=namespace_name,)) tags_query = model.tag.list_repository_tags(namespace_name, repo_name) tags_query = tags_query.limit(limit).offset(offset) @@ -151,7 +142,8 @@ class PreOCIModel(DockerRegistryV2DataInterface): query = query.limit(limit).offset(offset) return [_repository_for_repo(repo) for repo in query] - def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata): + def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, + storage_metadata): try: model.blob.initiate_upload(namespace_name, repo_name, upload_uuid, location_name, 
storage_metadata) @@ -176,8 +168,7 @@ class PreOCIModel(DockerRegistryV2DataInterface): piece_sha_state=found.piece_sha_state, piece_hashes=found.piece_hashes, location_name=found.location.name, - storage_metadata=found.storage_metadata, - ) + storage_metadata=found.storage_metadata,) def update_blob_upload(self, blob_upload): # Lookup the blob upload object. @@ -206,17 +197,14 @@ class PreOCIModel(DockerRegistryV2DataInterface): def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload, expiration_sec): location_obj = model.storage.get_image_location_for_name(blob_upload.location_name) - blob_record = model.blob.store_blob_record_and_temp_link(namespace_name, repo_name, - blob_digest, location_obj.id, - blob_upload.byte_count, - expiration_sec, - blob_upload.uncompressed_byte_count) + blob_record = model.blob.store_blob_record_and_temp_link( + namespace_name, repo_name, blob_digest, location_obj.id, blob_upload.byte_count, + expiration_sec, blob_upload.uncompressed_byte_count) return Blob( uuid=blob_record.uuid, digest=blob_digest, size=blob_upload.byte_count, - locations=[blob_upload.location_name], - ) + locations=[blob_upload.location_name],) def lookup_blobs_by_digest(self, namespace_name, repo_name, digests): def _blob_view(blob_record): @@ -224,7 +212,7 @@ class PreOCIModel(DockerRegistryV2DataInterface): uuid=blob_record.uuid, digest=blob_record.content_checksum, size=blob_record.image_size, - locations=None, # Note: Locations is None in this case. + locations=None, # Note: Locations is None in this case. ) repo = model.repository.get_repository(namespace_name, repo_name) @@ -240,8 +228,7 @@ class PreOCIModel(DockerRegistryV2DataInterface): uuid=blob_record.uuid, digest=digest, size=blob_record.image_size, - locations=blob_record.locations, - ) + locations=blob_record.locations,) except model.BlobDoesNotExist: return None @@ -282,8 +269,7 @@ def _docker_v1_metadata(namespace_name, repo_name, repo_image): comment=repo_image.comment, command=repo_image.command, # TODO: make sure this isn't needed anywhere, as it is expensive to lookup - parent_image_id=None, - ) + parent_image_id=None,) def _repository_for_repo(repo): @@ -295,8 +281,7 @@ def _repository_for_repo(repo): description=repo.description, is_public=model.repository.is_repository_public(repo), kind=model.repository.get_repo_kind_name(repo), - trust_enabled=repo.trust_enabled, - ) + trust_enabled=repo.trust_enabled,) data_model = PreOCIModel() diff --git a/endpoints/v2/tag.py b/endpoints/v2/tag.py index 9c0e81b02..776663520 100644 --- a/endpoints/v2/tag.py +++ b/endpoints/v2/tag.py @@ -6,6 +6,7 @@ from endpoints.decorators import anon_protect from endpoints.v2 import v2_bp, require_repo_read, paginate from endpoints.v2.models_pre_oci import data_model as model + @v2_bp.route('//tags/list', methods=['GET']) @parse_repository_name() @process_registry_jwt_auth(scopes=['pull']) @@ -16,8 +17,7 @@ def list_all_tags(namespace_name, repo_name, limit, offset, pagination_callback) tags = model.repository_tags(namespace_name, repo_name, limit, offset) response = jsonify({ 'name': '{0}/{1}'.format(namespace_name, repo_name), - 'tags': [tag.name for tag in tags], - }) + 'tags': [tag.name for tag in tags],}) pagination_callback(len(tags), response) return response diff --git a/endpoints/v2/v2auth.py b/endpoints/v2/v2auth.py index 747404b88..7eb08e1bb 100644 --- a/endpoints/v2/v2auth.py +++ b/endpoints/v2/v2auth.py @@ -16,15 +16,15 @@ from endpoints.v2.errors import InvalidLogin, NameInvalid, InvalidRequest, 
Unsup from endpoints.v2.models_pre_oci import data_model as model from util.cache import no_cache from util.names import parse_namespace_repository, REPOSITORY_NAME_REGEX -from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, QUAY_TUF_ROOT, - SIGNER_TUF_ROOT, DISABLED_TUF_ROOT) +from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, + QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT) logger = logging.getLogger(__name__) - TOKEN_VALIDITY_LIFETIME_S = 60 * 60 # 1 hour SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$' + @lru_cache(maxsize=1) def get_scope_regex(): hostname = re.escape(app.config['SERVER_HOSTNAME']) @@ -64,8 +64,7 @@ def generate_registry_jwt(auth_result): access = [] user_event_data = { - 'action': 'login', - } + 'action': 'login',} tuf_root = DISABLED_TUF_ROOT if len(scope_param) > 0: @@ -101,8 +100,8 @@ def generate_registry_jwt(auth_result): repo_is_public = repo is not None and repo.is_public invalid_repo_message = '' if repo is not None and repo.kind != 'image': - invalid_repo_message = (('This repository is for managing %s resources ' + - 'and not container images.') % repo.kind) + invalid_repo_message = (( + 'This repository is for managing %s resources ' + 'and not container images.') % repo.kind) if 'push' in actions: # If there is no valid user or token, then the repository cannot be @@ -150,8 +149,7 @@ def generate_registry_jwt(auth_result): access.append({ 'type': 'repository', 'name': registry_and_repo, - 'actions': final_actions, - }) + 'actions': final_actions,}) # Set the user event data for the auth. if 'push' in final_actions: @@ -164,8 +162,7 @@ def generate_registry_jwt(auth_result): user_event_data = { 'action': user_action, 'repository': reponame, - 'namespace': namespace, - } + 'namespace': namespace,} tuf_root = get_tuf_root(repo, namespace, reponame) elif user is None and token is None: @@ -179,7 +176,8 @@ def generate_registry_jwt(auth_result): event.publish_event_data('docker-cli', user_event_data) # Build the signed JWT. 
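The scope handled above follows the registry token convention, e.g. repository:devtable/simple:pull,push, and each granted scope becomes one entry in the access list embedded in the signed bearer token. A small illustrative sketch of that shape (repository name invented; the real endpoint validates the scope against SCOPE_REGEX_TEMPLATE and the caller's permissions):

# Illustrative only: what a Docker client asks for and what this endpoint grants
# when the permission checks pass.
scope_param = 'repository:devtable/simple:pull,push'

_, name, actions = scope_param.split(':')
access_entry = {
    'type': 'repository',
    'name': name,                   # 'devtable/simple'
    'actions': actions.split(','),  # ['pull', 'push']
}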
- context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken, tuf_root=tuf_root) + context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken, + tuf_root=tuf_root) token = generate_bearer_token(audience_param, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, instance_keys) return jsonify({'token': token}) From 8c03a6be31858bacb08a4f97b08ea96869b3ffb3 Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Fri, 23 Jun 2017 14:52:43 -0700 Subject: [PATCH 09/22] cor-tab-panel emits first registered tab ID if active tab is undefined adding e2e tests for cor-tabs --- .../cor-tab-pane.component.spec.ts | 25 ++++++++- .../cor-tab-pane/cor-tab-pane.component.ts | 11 ++-- .../cor-tab-panel.component.spec.ts | 29 +++++++++-- .../cor-tab-panel/cor-tab-panel.component.ts | 7 ++- .../cor-tab/cor-tab.component.spec.ts | 21 +++++--- .../ui/cor-tabs/cor-tab/cor-tab.component.ts | 23 +++++---- .../ui/cor-tabs/cor-tabs.view-object.ts | 9 ++++ static/test/e2e/image-repo.scenario.ts | 51 +++++++++++++++++++ static/test/protractor.conf.ts | 5 +- 9 files changed, 153 insertions(+), 28 deletions(-) create mode 100644 static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts create mode 100644 static/test/e2e/image-repo.scenario.ts diff --git a/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.spec.ts b/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.spec.ts index 46441848a..a86296d93 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.spec.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.spec.ts @@ -12,11 +12,12 @@ describe("CorTabPaneComponent", () => { beforeEach(() => { activeTab = new BehaviorSubject(null); - spyOn(activeTab, "subscribe").and.returnValue(null); + spyOn(activeTab, "subscribe").and.callThrough(); panelMock = new Mock(); panelMock.setup(mock => mock.activeTab).is(activeTab); component = new CorTabPaneComponent(panelMock.Object); + component.id = 'description'; }); describe("ngOnInit", () => { @@ -36,5 +37,27 @@ describe("CorTabPaneComponent", () => { expect((panelMock.Object.activeTab.subscribe)).toHaveBeenCalled(); }); + + it("does nothing if active tab ID is undefined", () => { + component.ngOnInit(); + component.isActiveTab = true; + panelMock.Object.activeTab.next(null); + + expect(component.isActiveTab).toEqual(true); + }); + + it("sets self as active if active tab ID matches tab ID", () => { + component.ngOnInit(); + panelMock.Object.activeTab.next(component.id); + + expect(component.isActiveTab).toEqual(true); + }); + + it("sets self as inactive if active tab ID does not match tab ID", () => { + component.ngOnInit(); + panelMock.Object.activeTab.next(component.id.split('').reverse().join('')); + + expect(component.isActiveTab).toEqual(false); + }); }); }); diff --git a/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.ts b/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.ts index 95867aa3b..5f64c7add 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab-pane/cor-tab-pane.component.ts @@ -1,5 +1,6 @@ import { Component, Input, Inject, Host, OnInit } from 'ng-metadata/core'; import { CorTabPanelComponent } from '../cor-tab-panel/cor-tab-panel.component'; +import 'rxjs/add/operator/filter'; /** @@ -16,7 +17,7 @@ export class CorTabPaneComponent implements OnInit { @Input('@') public id: string; - private isActiveTab: 
boolean = false; + public isActiveTab: boolean = false; constructor(@Host() @Inject(CorTabPanelComponent) private panel: CorTabPanelComponent) { @@ -25,8 +26,10 @@ export class CorTabPaneComponent implements OnInit { public ngOnInit(): void { this.panel.addTabPane(this); - this.panel.activeTab.subscribe((tabId: string) => { - this.isActiveTab = (this.id === tabId); - }); + this.panel.activeTab + .filter(tabId => tabId != undefined) + .subscribe((tabId: string) => { + this.isActiveTab = (this.id === tabId); + }); } } diff --git a/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.spec.ts b/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.spec.ts index c26634472..e0a616d92 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.spec.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.spec.ts @@ -12,9 +12,16 @@ describe("CorTabPanelComponent", () => { }); describe("ngOnInit", () => { + var tabs: CorTabComponent[] = []; beforeEach(() => { - spyOn(component.activeTab, "subscribe").and.returnValue(null); + // Add tabs to panel + tabs.push(new CorTabComponent(component)); + tabs[0].tabId = "info"; + tabs.forEach((tab) => component.addTab(tab)); + + spyOn(component.activeTab, "subscribe").and.callThrough(); + spyOn(component.activeTab, "next").and.callThrough(); spyOn(component.tabChange, "emit").and.returnValue(null); }); @@ -24,12 +31,26 @@ describe("CorTabPanelComponent", () => { expect(component.activeTab.subscribe).toHaveBeenCalled(); }); - it("emits output event for tab change when ", () => { + it("emits next active tab with tab ID of first registered tab if given tab ID is null", () => { + component.ngOnInit(); + component.activeTab.next(null); + + expect((component.activeTab.next).calls.argsFor(1)[0]).toEqual(tabs[0].tabId); + }); + + it("does not emit output event for tab change if tab ID is null", () => { + component.ngOnInit(); + component.activeTab.next(null); + + expect((component.tabChange.emit).calls.allArgs).not.toContain(null); + }); + + it("emits output event for tab change when tab ID is not null", () => { component.ngOnInit(); const tabId: string = "description"; - (component.activeTab.subscribe).calls.argsFor(0)[0](tabId); + component.activeTab.next(tabId); - expect((component.tabChange.emit).calls.argsFor(0)[0]).toEqual(tabId); + expect((component.tabChange.emit).calls.argsFor(1)[0]).toEqual(tabId); }); }); diff --git a/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.ts b/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.ts index 3d3321054..01a1740a6 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab-panel/cor-tab-panel.component.ts @@ -28,7 +28,12 @@ export class CorTabPanelComponent implements OnInit, OnChanges { public ngOnInit(): void { this.activeTab.subscribe((tabId: string) => { - this.tabChange.emit(tabId); + // Catch null values and replace with tabId of first tab + if (!tabId && this.tabs[0]) { + this.activeTab.next(this.tabs[0].tabId); + } else { + this.tabChange.emit(tabId); + } }); } diff --git a/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.spec.ts b/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.spec.ts index ead20c980..6f471beb1 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.spec.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.spec.ts @@ -12,7 +12,7 @@ 
describe("CorTabComponent", () => { beforeEach(() => { activeTab = new BehaviorSubject(null); - spyOn(activeTab, "subscribe").and.returnValue(null); + spyOn(activeTab, "subscribe").and.callThrough(); panelMock = new Mock(); panelMock.setup(mock => mock.activeTab).is(activeTab); @@ -35,16 +35,25 @@ describe("CorTabComponent", () => { expect((panelMock.Object.activeTab.subscribe)).toHaveBeenCalled(); }); + it("does nothing if active tab ID is undefined", () => { + component.ngOnInit(); + panelMock.Object.activeTab.next(null); + + expect(component.tabInit.emit).not.toHaveBeenCalled(); + expect(component.tabShow.emit).not.toHaveBeenCalled(); + expect(component.tabHide.emit).not.toHaveBeenCalled(); + }); + it("emits output event for tab init if it is new active tab", () => { component.ngOnInit(); - (panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId); + panelMock.Object.activeTab.next(component.tabId); expect(component.tabInit.emit).toHaveBeenCalled(); }); it("emits output event for tab show if it is new active tab", () => { component.ngOnInit(); - (panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId); + panelMock.Object.activeTab.next(component.tabId); expect(component.tabShow.emit).toHaveBeenCalled(); }); @@ -53,8 +62,8 @@ describe("CorTabComponent", () => { const newTabId: string = component.tabId.split('').reverse().join(''); component.ngOnInit(); // Call twice, first time to set 'isActive' to true - (panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](component.tabId); - (panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](newTabId); + panelMock.Object.activeTab.next(component.tabId); + panelMock.Object.activeTab.next(newTabId); expect(component.tabHide.emit).toHaveBeenCalled(); }); @@ -62,7 +71,7 @@ describe("CorTabComponent", () => { it("does not emit output event for tab hide if was not previously active tab", () => { const newTabId: string = component.tabId.split('').reverse().join(''); component.ngOnInit(); - (panelMock.Object.activeTab.subscribe).calls.argsFor(0)[0](newTabId); + panelMock.Object.activeTab.next(newTabId); expect(component.tabHide.emit).not.toHaveBeenCalled(); }); diff --git a/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.ts b/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.ts index 2bb832194..0fc76f7fd 100644 --- a/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.ts +++ b/static/js/directives/ui/cor-tabs/cor-tab/cor-tab.component.ts @@ -1,5 +1,6 @@ import { Component, Input, Output, Inject, EventEmitter, Host, OnInit } from 'ng-metadata/core'; import { CorTabPanelComponent } from '../cor-tab-panel/cor-tab-panel.component'; +import 'rxjs/add/operator/filter'; /** @@ -28,16 +29,18 @@ export class CorTabComponent implements OnInit { } public ngOnInit(): void { - this.panel.activeTab.subscribe((tabId: string) => { - if (!this.isActive && this.tabId === tabId) { - this.isActive = true; - this.tabInit.emit({}); - this.tabShow.emit({}); - } else if (this.isActive && this.tabId !== tabId) { - this.isActive = false; - this.tabHide.emit({}); - } - }); + this.panel.activeTab + .filter(tabId => tabId != undefined) + .subscribe((tabId: string) => { + if (!this.isActive && this.tabId === tabId) { + this.isActive = true; + this.tabInit.emit({}); + this.tabShow.emit({}); + } else if (this.isActive && this.tabId !== tabId) { + this.isActive = false; + this.tabHide.emit({}); + } + }); this.panel.addTab(this); } diff --git a/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts 
b/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts new file mode 100644 index 000000000..f34e51cd5 --- /dev/null +++ b/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts @@ -0,0 +1,9 @@ +import { element, by, browser, $, ElementFinder, ExpectedConditions as until } from 'protractor'; + + +export class CorTabsViewObject { + + public selectTabByTitle(title: string): Promise { + return Promise.resolve($(`cor-tab[tab-title="${title}"] a`).click()); + } +} diff --git a/static/test/e2e/image-repo.scenario.ts b/static/test/e2e/image-repo.scenario.ts new file mode 100644 index 000000000..9e9122334 --- /dev/null +++ b/static/test/e2e/image-repo.scenario.ts @@ -0,0 +1,51 @@ +import { browser, element, by, $, $$ } from 'protractor'; +import { appHost } from '../protractor.conf'; +import { CorTabsViewObject } from '../../js/directives/ui/cor-tabs/cor-tabs.view-object'; + + +describe("Image Repository", () => { + const username = 'devtable'; + const password = 'password'; + const repoTabs: CorTabsViewObject = new CorTabsViewObject(); + + beforeAll((done) => { + browser.waitForAngularEnabled(false); + + // Sign in + browser.get(appHost); + $$('a[href="/signin/"]').get(1).click(); + $('#signin-username').sendKeys(username); + $('#signin-password').sendKeys(password); + element(by.partialButtonText('Sign in')).click(); + browser.sleep(4000); + + // Navigate to image repository + browser.get(`${appHost}/repository/devtable/simple`).then(() => done()); + }); + + afterAll(() => { + browser.waitForAngularEnabled(true); + }); + + describe("information tab", () => { + + beforeAll((done) => { + repoTabs.selectTabByTitle('Information').then(() => done()); + }); + + it("displays repository description", () => { + expect(element(by.cssContainingText('h4', 'Description')).isDisplayed()).toBe(true); + }); + }); + + describe("tags tab", () => { + + beforeAll((done) => { + repoTabs.selectTabByTitle('Tags').then(() => done()); + }); + + it("displays repository description", () => { + expect(element(by.cssContainingText('h4', 'Description')).isDisplayed()).toBe(true); + }); + }); +}); diff --git a/static/test/protractor.conf.ts b/static/test/protractor.conf.ts index 9016c1743..1f59fdd12 100644 --- a/static/test/protractor.conf.ts +++ b/static/test/protractor.conf.ts @@ -20,7 +20,7 @@ export const config: Config = { framework: 'jasmine', seleniumAddress: 'http://localhost:4444/wd/hub', // Uncomment to run tests against local Chrome instance - // directConnect: true, + directConnect: true, capabilities: { browserName: 'chrome', chromeOptions: { @@ -61,6 +61,7 @@ export const config: Config = { }, specs: [ './e2e/sanity.scenario.ts', - './e2e/trigger-creation.scenario.ts' + // './e2e/trigger-creation.scenario.ts', + './e2e/image-repo.scenario.ts', ], }; From f78e1fb679a2ea45141f5d6afc7bb3347add268e Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Mon, 26 Jun 2017 16:43:18 -0700 Subject: [PATCH 10/22] added end-to-end tests for cor-tabs in image repo view --- .../ui/cor-tabs/cor-tabs.view-object.ts | 4 + static/test/e2e/image-repo.scenario.ts | 85 ++++++++++++++++++- static/test/protractor.conf.ts | 2 +- 3 files changed, 86 insertions(+), 5 deletions(-) diff --git a/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts b/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts index f34e51cd5..c07ea281f 100644 --- a/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts +++ b/static/js/directives/ui/cor-tabs/cor-tabs.view-object.ts @@ -6,4 +6,8 @@ export class CorTabsViewObject { public 
selectTabByTitle(title: string): Promise { return Promise.resolve($(`cor-tab[tab-title="${title}"] a`).click()); } + + public isActiveTab(title: string): Promise { + return Promise.resolve($(`cor-tab[tab-title="${title}"] .cor-tab-itself.active`).isPresent()); + } } diff --git a/static/test/e2e/image-repo.scenario.ts b/static/test/e2e/image-repo.scenario.ts index 9e9122334..ff000e3e0 100644 --- a/static/test/e2e/image-repo.scenario.ts +++ b/static/test/e2e/image-repo.scenario.ts @@ -28,24 +28,101 @@ describe("Image Repository", () => { }); describe("information tab", () => { + const tabTitle: string = 'Information'; beforeAll((done) => { - repoTabs.selectTabByTitle('Information').then(() => done()); + repoTabs.selectTabByTitle(tabTitle).then(() => done()); }); it("displays repository description", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); expect(element(by.cssContainingText('h4', 'Description')).isDisplayed()).toBe(true); }); }); describe("tags tab", () => { + const tabTitle: string = 'Tags'; beforeAll((done) => { - repoTabs.selectTabByTitle('Tags').then(() => done()); + repoTabs.selectTabByTitle(tabTitle).then(() => done()); }); - it("displays repository description", () => { - expect(element(by.cssContainingText('h4', 'Description')).isDisplayed()).toBe(true); + it("displays repository tags", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); + expect(element(by.cssContainingText('.tab-header', 'Repository Tags')).isDisplayed()).toBe(true); + }); + }); + + describe("tag history tab", () => { + const tabTitle: string = 'Tag History'; + + beforeAll((done) => { + repoTabs.selectTabByTitle(tabTitle).then(() => done()); + }); + + it("displays repository tags", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); + expect(element(by.cssContainingText('.tab-header', 'Tag History')).isDisplayed()).toBe(true); + }); + }); + + describe("builds tab", () => { + const tabTitle: string = 'Builds'; + + beforeAll((done) => { + repoTabs.selectTabByTitle(tabTitle).then(() => done()); + }); + + it("displays repository tags", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); + expect(element(by.cssContainingText('.tab-header', 'Repository Builds')).isDisplayed()).toBe(true); + }); + }); + + describe("usage logs tab", () => { + const tabTitle: string = 'Usage Logs'; + + beforeAll((done) => { + repoTabs.selectTabByTitle(tabTitle).then(() => done()); + }); + + it("displays repository tags", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); + expect(element(by.cssContainingText('h3', 'Usage Logs')).isDisplayed()).toBe(true); + }); + }); + + describe("settings tab", () => { + const tabTitle: string = 'Settings'; + + beforeAll((done) => { + repoTabs.selectTabByTitle(tabTitle).then(() => done()); + }); + + it("displays repository tags", () => { + expect(repoTabs.isActiveTab(tabTitle)).toBe(true); + expect(element(by.cssContainingText('.tab-header', 'Settings')).isDisplayed()).toBe(true); + }); + }); + + describe("tabs navigation", () => { + + beforeAll((done) => { + repoTabs.selectTabByTitle('Information'); + repoTabs.selectTabByTitle('Tags'); + done(); + }); + + it("back button returns to previous tab", () => { + browser.navigate().back(); + + expect(repoTabs.isActiveTab('Information')).toBe(true); + }); + + it("forward button returns to next tab", () => { + browser.navigate().forward(); + + expect(repoTabs.isActiveTab('Tags')).toBe(true); }); }); }); diff --git a/static/test/protractor.conf.ts b/static/test/protractor.conf.ts index 1f59fdd12..e69f9753d 
100644 --- a/static/test/protractor.conf.ts +++ b/static/test/protractor.conf.ts @@ -60,7 +60,7 @@ export const config: Config = { browser.close(); }, specs: [ - './e2e/sanity.scenario.ts', + // './e2e/sanity.scenario.ts', // './e2e/trigger-creation.scenario.ts', './e2e/image-repo.scenario.ts', ], From 11659f73bf2ecd10cd03e3f130ef0973d27bc5d7 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Tue, 27 Jun 2017 15:26:40 +0300 Subject: [PATCH 11/22] Fix log for reenabling a notification We forgot to log the event and method names --- data/model/notification.py | 3 ++- endpoints/api/repositorynotification.py | 10 ++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/data/model/notification.py b/data/model/notification.py index ef8f40aec..3769d52cd 100644 --- a/data/model/notification.py +++ b/data/model/notification.py @@ -139,8 +139,9 @@ def reset_notification_number_of_failures(namespace_name, repository_name, uuid) notification.repository.name != repository_name): raise InvalidNotificationException('No repository notification found with uuid: %s' % uuid) reset_number_of_failures_to_zero(notification.id) + return notification except RepositoryNotification.DoesNotExist: - pass + return None def reset_number_of_failures_to_zero(notification_id): diff --git a/endpoints/api/repositorynotification.py b/endpoints/api/repositorynotification.py index 9b3e5ae30..ab81d2c5b 100644 --- a/endpoints/api/repositorynotification.py +++ b/endpoints/api/repositorynotification.py @@ -161,10 +161,12 @@ class RepositoryNotification(RepositoryParamResource): @disallow_for_app_repositories def post(self, namespace, repository, uuid): """ Resets repository notification to 0 failures. """ - model.notification.reset_notification_number_of_failures(namespace, repository, uuid) - log_action('reset_repo_notification', namespace, - {'repo': repository, 'namespace': namespace, 'notification_id': uuid}, - repo=model.repository.get_repository(namespace, repository)) + reset = model.notification.reset_notification_number_of_failures(namespace, repository, uuid) + if reset is not None: + log_action('reset_repo_notification', namespace, + {'repo': repository, 'namespace': namespace, 'notification_id': uuid, + 'event': reset.event.name, 'method': reset.method.name}, + repo=model.repository.get_repository(namespace, repository)) return 'No Content', 204 From 41e7e559a6f73f105a3f7d12174320d1df8ac3a9 Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Mon, 19 Jun 2017 23:17:42 -0700 Subject: [PATCH 12/22] added TSLint for TypeScript code style checking, fixed associated errors --- package.json | 6 +- .../js/decorators/inject/inject.decorator.ts | 2 +- .../quay-require/quay-require.directive.ts | 2 +- .../app-public-view.component.ts | 12 +- .../ui/channel-icon/channel-icon.component.ts | 2 +- .../clipboard-copy.directive.ts | 2 +- .../directives/ui/cor-tabs/cor-tabs.module.ts | 2 +- .../duration-input.component.ts | 12 +- .../linear-workflow.component.ts | 2 +- .../manage-trigger.component.ts | 7 +- .../manage-trigger.view-object.ts | 9 +- .../ui/markdown/markdown-editor.component.ts | 6 +- .../repository-signing-config.component.ts | 6 +- .../ui/search-box/search-box.component.ts | 4 +- .../tag-signing-display.component.ts | 27 +- .../time-machine-settings.component.ts | 6 +- .../ui/typeahead/typeahead.directive.ts | 51 +- .../visibility-indicator.component.ts | 5 +- static/js/quay-config.module.ts | 3 +- static/js/quay-run.ts | 7 +- static/js/quay.module.ts | 5 +- .../js/services/avatar/avatar.service.impl.ts | 2 
+- static/js/services/avatar/avatar.service.ts | 2 +- .../js/services/build/build.service.impl.ts | 5 +- static/js/services/build/build.service.ts | 2 +- .../datafile/datafile.service.impl.ts | 8 +- .../js/services/datafile/datafile.service.ts | 2 +- .../dockerfile/dockerfile.service.impl.ts | 6 +- .../services/dockerfile/dockerfile.service.ts | 2 +- static/js/services/page/page.service.impl.ts | 12 +- static/js/services/page/page.service.ts | 23 +- .../route-builder.service.impl.ts | 6 +- .../route-builder/route-builder.service.ts | 2 +- .../services/util/util.service.impl.spec.ts | 40 -- static/js/services/util/util.service.impl.ts | 39 -- static/js/services/util/util.service.ts | 19 - .../js/services/view-array/view-array.impl.ts | 2 +- static/js/services/view-array/view-array.ts | 2 +- tsconfig.json | 7 +- tslint.json | 30 +- typings.json | 6 - webpack.config.js | 2 +- yarn.lock | 586 +++--------------- 43 files changed, 253 insertions(+), 730 deletions(-) delete mode 100644 static/js/services/util/util.service.impl.spec.ts delete mode 100644 static/js/services/util/util.service.impl.ts delete mode 100644 static/js/services/util/util.service.ts delete mode 100644 typings.json diff --git a/package.json b/package.json index 11ffa8c8e..9d36e33a3 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,8 @@ "test:node": "JASMINE_CONFIG_PATH=static/test/jasmine.json ./node_modules/.bin/jasmine-ts './static/js/**/*.spec.ts'", "e2e": "./node_modules/.bin/ts-node ./node_modules/.bin/protractor static/test/protractor.conf.ts", "build": "NODE_ENV=production ./node_modules/.bin/webpack --progress", - "watch": "./node_modules/.bin/webpack --watch" + "watch": "./node_modules/.bin/webpack --watch", + "lint": "./node_modules/.bin/tslint --type-check -p tsconfig.json -e **/*.spec.ts" }, "repository": { "type": "git", @@ -54,7 +55,6 @@ "@types/react-dom": "0.14.17", "@types/showdown": "^1.4.32", "angular-mocks": "1.6.2", - "angular-ts-decorators": "0.0.19", "css-loader": "0.25.0", "html-loader": "^0.4.5", "jasmine-core": "^2.5.2", @@ -73,8 +73,8 @@ "ts-loader": "^0.9.5", "ts-mocks": "^0.2.2", "ts-node": "^3.0.6", + "tslint": "^5.4.3", "typescript": "^2.2.1", - "typings": "1.4.0", "webpack": "^2.2" } } diff --git a/static/js/decorators/inject/inject.decorator.ts b/static/js/decorators/inject/inject.decorator.ts index dadc20a58..bada5f9c5 100644 --- a/static/js/decorators/inject/inject.decorator.ts +++ b/static/js/decorators/inject/inject.decorator.ts @@ -7,5 +7,5 @@ export function Inject(value: string) { return (target: any, propertyKey: string | symbol, parameterIndex: number): void => { target.$inject = target.$inject = []; target.$inject[parameterIndex] = value; - } + }; } diff --git a/static/js/directives/structural/quay-require/quay-require.directive.ts b/static/js/directives/structural/quay-require/quay-require.directive.ts index 32b15341a..4ac1add29 100644 --- a/static/js/directives/structural/quay-require/quay-require.directive.ts +++ b/static/js/directives/structural/quay-require/quay-require.directive.ts @@ -38,4 +38,4 @@ export class QuayRequireDirective implements AfterContentInit { this.$transclude ]); } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/app-public-view/app-public-view.component.ts b/static/js/directives/ui/app-public-view/app-public-view.component.ts index 104249a66..7326e1c21 100644 --- a/static/js/directives/ui/app-public-view/app-public-view.component.ts +++ b/static/js/directives/ui/app-public-view/app-public-view.component.ts @@ -9,7 +9,9 @@ import 
{ Input, Component, Inject } from 'ng-metadata/core'; templateUrl: '/static/js/directives/ui/app-public-view/app-public-view.component.html' }) export class AppPublicViewComponent { + @Input('<') public repository: any; + private settingsShown: number = 0; private logsShown: number = 0; @@ -17,11 +19,6 @@ export class AppPublicViewComponent { this.updateDescription = this.updateDescription.bind(this); } - private updateDescription(content: string) { - this.repository.description = content; - this.repository.put(); - } - public showSettings(): void { this.settingsShown++; } @@ -29,4 +26,9 @@ export class AppPublicViewComponent { public showLogs(): void { this.logsShown++; } + + private updateDescription(content: string) { + this.repository.description = content; + this.repository.put(); + } } diff --git a/static/js/directives/ui/channel-icon/channel-icon.component.ts b/static/js/directives/ui/channel-icon/channel-icon.component.ts index 6474238c8..444c08a4f 100644 --- a/static/js/directives/ui/channel-icon/channel-icon.component.ts +++ b/static/js/directives/ui/channel-icon/channel-icon.component.ts @@ -44,4 +44,4 @@ export class ChannelIconComponent { var num: number = parseInt(hash.substr(0, 4)); return this.colors[num % this.colors.length]; } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/clipboard-copy/clipboard-copy.directive.ts b/static/js/directives/ui/clipboard-copy/clipboard-copy.directive.ts index 0be17118b..f1beea58e 100644 --- a/static/js/directives/ui/clipboard-copy/clipboard-copy.directive.ts +++ b/static/js/directives/ui/clipboard-copy/clipboard-copy.directive.ts @@ -60,4 +60,4 @@ export class ClipboardCopyDirective implements AfterContentInit, OnDestroy { this.clipboard.destroy(); } } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/cor-tabs/cor-tabs.module.ts b/static/js/directives/ui/cor-tabs/cor-tabs.module.ts index 02570a1b0..f8cee4619 100644 --- a/static/js/directives/ui/cor-tabs/cor-tabs.module.ts +++ b/static/js/directives/ui/cor-tabs/cor-tabs.module.ts @@ -1,4 +1,4 @@ -import { NgModule } from 'ng-metadata/core' +import { NgModule } from 'ng-metadata/core'; import { CorTabsComponent } from './cor-tabs.component'; import { CorTabComponent } from './cor-tab/cor-tab.component'; import { CorNavTabsDirective } from './cor-nav-tabs/cor-nav-tabs.directive'; diff --git a/static/js/directives/ui/duration-input/duration-input.component.ts b/static/js/directives/ui/duration-input/duration-input.component.ts index 937a1e456..de4858bd1 100644 --- a/static/js/directives/ui/duration-input/duration-input.component.ts +++ b/static/js/directives/ui/duration-input/duration-input.component.ts @@ -1,6 +1,7 @@ -import { Input, Output, Component, Inject } from 'ng-metadata/core'; +import { Input, Component, Inject } from 'ng-metadata/core'; import * as moment from "moment"; + /** * A component that allows for selecting a time duration. 
*/ @@ -9,6 +10,7 @@ import * as moment from "moment"; templateUrl: '/static/js/directives/ui/duration-input/duration-input.component.html' }) export class DurationInputComponent implements ng.IComponentController { + @Input('<') public min: string; @Input('<') public max: string; @Input('=?') public value: string; @@ -17,7 +19,7 @@ export class DurationInputComponent implements ng.IComponentController { private min_s: number; private max_s: number; - constructor (@Inject('$scope') private $scope: ng.IScope) { + constructor(@Inject('$scope') private $scope: ng.IScope) { } @@ -33,7 +35,7 @@ export class DurationInputComponent implements ng.IComponentController { } private updateValue(): void { - this.value = this.seconds + 's'; + this.value = `${this.seconds}s`; } private refresh(): void { @@ -41,8 +43,8 @@ export class DurationInputComponent implements ng.IComponentController { this.max_s = this.toSeconds(this.max || '1h'); if (this.value) { - this.seconds = this.toSeconds(this.value || '0s') - }; + this.seconds = this.toSeconds(this.value || '0s'); + } } private durationExplanation(durationSeconds: string): string { diff --git a/static/js/directives/ui/linear-workflow/linear-workflow.component.ts b/static/js/directives/ui/linear-workflow/linear-workflow.component.ts index 1cb82aa56..719eddf37 100644 --- a/static/js/directives/ui/linear-workflow/linear-workflow.component.ts +++ b/static/js/directives/ui/linear-workflow/linear-workflow.component.ts @@ -75,4 +75,4 @@ export class LinearWorkflowComponent { export type SectionInfo = { index: number; component: LinearWorkflowSectionComponent; -} +}; diff --git a/static/js/directives/ui/manage-trigger/manage-trigger.component.ts b/static/js/directives/ui/manage-trigger/manage-trigger.component.ts index f478c9968..9b4797533 100644 --- a/static/js/directives/ui/manage-trigger/manage-trigger.component.ts +++ b/static/js/directives/ui/manage-trigger/manage-trigger.component.ts @@ -17,7 +17,9 @@ export class ManageTriggerComponent implements OnChanges { @Input('<') public githost: string = 'custom-git'; @Input('<') public repository: Repository; @Input('<') public trigger: Trigger; + @Output() public activateTrigger: EventEmitter<{config: TriggerConfig, pull_robot?: any}> = new EventEmitter(); + public config: TriggerConfig; public local: Local = { selectedRepository: {name: ''}, @@ -28,6 +30,7 @@ export class ManageTriggerComponent implements OnChanges { repositoryOptions: {filter: '', predicate: 'score', reverse: false, page: 0, hideStale: true}, robotOptions: {filter: '', predicate: 'score', reverse: false, page: 0}, }; + private namespacesPerPage: number = 10; private repositoriesPerPage: number = 10; private robotsPerPage: number = 10; @@ -174,7 +177,7 @@ export class ManageTriggerComponent implements OnChanges { } private setPossibleContexts(path: string) { - if (this.local.dockerfileLocations.contextMap){ + if (this.local.dockerfileLocations.contextMap) { this.local.contexts = this.local.dockerfileLocations.contextMap[path] || []; } else { this.local.contexts = [path.split('/').slice(0, -1).join('/').concat('/')]; @@ -288,7 +291,7 @@ export class ManageTriggerComponent implements OnChanges { const kind = ref.kind == 'branch' ? 'heads' : 'tags'; const icon = ref.kind == 'branch' ? 
'fa-code-fork' : 'fa-tag'; return { - 'value': kind + '/' + ref.name, + 'value': `${kind}/${ref.name}`, 'icon': icon, 'title': ref.name }; diff --git a/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts b/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts index 3998cfb27..a0ec1b875 100644 --- a/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts +++ b/static/js/directives/ui/manage-trigger/manage-trigger.view-object.ts @@ -16,10 +16,13 @@ export class ManageTriggerViewObject { private customGitRepoInput: ElementFinder = element(by.model('$ctrl.buildSource')); private dockerfileLocationInput: ElementFinder = this.sections['dockerfilelocation'].$('input'); - private dockerfileLocationDropdownButton: ElementFinder = this.sections['dockerfilelocation'].$('button[data-toggle=dropdown'); + private dockerfileLocationDropdownButton: ElementFinder = this.sections['dockerfilelocation'] + .$('button[data-toggle=dropdown'); private dockerContextInput: ElementFinder = this.sections['contextlocation'].$('input'); - private dockerContextDropdownButton: ElementFinder = this.sections['contextlocation'].$('button[data-toggle=dropdown'); - private robotAccountOptions: ElementFinder = this.sections['robot'].element(by.repeater('$ctrl.orderedData.visibleEntries')); + private dockerContextDropdownButton: ElementFinder = this.sections['contextlocation'] + .$('button[data-toggle=dropdown'); + private robotAccountOptions: ElementFinder = this.sections['robot'] + .element(by.repeater('$ctrl.orderedData.visibleEntries')); public continue(): Promise { return Promise.resolve(element(by.buttonText('Continue')).click()); diff --git a/static/js/directives/ui/markdown/markdown-editor.component.ts b/static/js/directives/ui/markdown/markdown-editor.component.ts index 97833ac17..8cdf50a85 100644 --- a/static/js/directives/ui/markdown/markdown-editor.component.ts +++ b/static/js/directives/ui/markdown/markdown-editor.component.ts @@ -16,8 +16,10 @@ export class MarkdownEditorComponent { @Input('<') public content: string; @Output() public save: EventEmitter<{editedContent: string}> = new EventEmitter(); @Output() public discard: EventEmitter = new EventEmitter(); + // Textarea is public for testability, should not be directly accessed @ViewChild('#markdown-textarea') public textarea: ng.IAugmentedJQuery; + private editMode: EditMode = "write"; constructor(@Inject('$document') private $document: ng.IDocumentService, @@ -115,9 +117,9 @@ export class MarkdownEditorComponent { private insertText(text: string, startPos: number, endPos: number): void { if (this.browserPlatform === 'firefox') { // FIXME: Ctrl-Z highlights previous text - this.textarea.val(this.textarea.val().substr(0, startPos) + + this.textarea.val(this.textarea.val().substr(0, startPos) + text + - this.textarea.val().substr(endPos, this.textarea.val().length)); + this.textarea.val().substr(endPos, this.textarea.val().length)); } else { // TODO: Test other platforms (IE...) 
diff --git a/static/js/directives/ui/repository-signing-config/repository-signing-config.component.ts b/static/js/directives/ui/repository-signing-config/repository-signing-config.component.ts index fc9b15e22..67bdbd776 100644 --- a/static/js/directives/ui/repository-signing-config/repository-signing-config.component.ts +++ b/static/js/directives/ui/repository-signing-config/repository-signing-config.component.ts @@ -1,6 +1,7 @@ import { Input, Component, Inject } from 'ng-metadata/core'; import { Repository } from '../../../types/common.types'; + /** * A component that displays the configuration and options for repository signing. */ @@ -9,12 +10,13 @@ import { Repository } from '../../../types/common.types'; templateUrl: '/static/js/directives/ui/repository-signing-config/repository-signing-config.component.html', }) export class RepositorySigningConfigComponent { + @Input('<') public repository: Repository; private enableTrustInfo: {[key: string]: string} = null; private disableTrustInfo: {[key: string]: string} = null; - constructor (@Inject("ApiService") private ApiService: any) { + constructor(@Inject("ApiService") private ApiService: any) { } @@ -41,4 +43,4 @@ export class RepositorySigningConfigComponent { callback(true); }, errorDisplay); } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/search-box/search-box.component.ts b/static/js/directives/ui/search-box/search-box.component.ts index 597019d76..a2ea0281e 100644 --- a/static/js/directives/ui/search-box/search-box.component.ts +++ b/static/js/directives/ui/search-box/search-box.component.ts @@ -43,7 +43,7 @@ export class SearchBoxComponent { private onSelected($event): void { this.autocompleteSelected = true; this.$timeout(() => { - this.$location.url($event['result']['href']) + this.$location.url($event['result']['href']); }, 100); } @@ -54,4 +54,4 @@ export class SearchBoxComponent { this.$location.search('q', $event['value']); }, 10); } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/tag-signing-display/tag-signing-display.component.ts b/static/js/directives/ui/tag-signing-display/tag-signing-display.component.ts index f6852e407..04fcf9b78 100644 --- a/static/js/directives/ui/tag-signing-display/tag-signing-display.component.ts +++ b/static/js/directives/ui/tag-signing-display/tag-signing-display.component.ts @@ -2,6 +2,7 @@ import { Input, Component, Inject } from 'ng-metadata/core'; import { ApostilleDelegationsSet, ApostilleSignatureDocument, ApostilleTagDocument } from '../../../types/common.types'; import * as moment from "moment"; + type TagSigningInfo = { delegations: DelegationInfo[]; delegationsByName: {[delegationName: string]: DelegationInfo}; @@ -9,7 +10,8 @@ type TagSigningInfo = { hasExpiringSoon: boolean; hasExpired: boolean; hasInvalid: boolean; -} +}; + type DelegationInfo = { delegationName: string; @@ -20,7 +22,9 @@ type DelegationInfo = { isExpiringSoon: boolean }; -var RELEASES = ['targets/releases', 'targets']; + +const RELEASES = ['targets/releases', 'targets']; + /** * A component that displays the signing status of a tag in the repository view. 
@@ -30,13 +34,16 @@ var RELEASES = ['targets/releases', 'targets']; templateUrl: '/static/js/directives/ui/tag-signing-display/tag-signing-display.component.html', }) export class TagSigningDisplayComponent { + @Input('<') public compact: boolean; @Input('<') public tag: any; @Input('<') public delegations: ApostilleDelegationsSet; private cachedSigningInfo: TagSigningInfo | null = null; - constructor(@Inject("$sanitize") private $sanitize: ng.sanitize.ISanitizeService) {} + constructor(@Inject("$sanitize") private $sanitize: ng.sanitize.ISanitizeService) { + + } private base64ToHex(base64String: string): string { // Based on: http://stackoverflow.com/questions/39460182/decode-base64-to-hexadecimal-string-with-javascript @@ -49,13 +56,15 @@ export class TagSigningDisplayComponent { var hexString = ''; for (var i = 0; i < raw.length; ++i) { var char = raw.charCodeAt(i); - var hex = char.toString(16) + var hex = char.toString(16); hexString += (hex.length == 2 ? hex : '0' + hex); } return hexString; } - private buildDelegationInfo(tag: any, delegationName: string, delegation: ApostilleSignatureDocument): DelegationInfo { + private buildDelegationInfo(tag: any, + delegationName: string, + delegation: ApostilleSignatureDocument): DelegationInfo { var digest_without_prefix = tag.manifest_digest.substr('sha256:'.length); var hex_signature = this.base64ToHex(delegation.targets[tag.name].hashes['sha256']); @@ -70,7 +79,7 @@ export class TagSigningDisplayComponent { 'delegationHash': hex_signature, 'isExpired': expires.isSameOrBefore(now), 'isExpiringSoon': !expires.isSameOrBefore(now) && expires.isSameOrBefore(withOneWeek), - } + }; } private buildTagSigningInfo(tag: any, delegationSet: ApostilleDelegationsSet): TagSigningInfo { @@ -80,13 +89,13 @@ export class TagSigningDisplayComponent { 'hasExpired': false, 'hasExpiringSoon': false, 'hasInvalid': false, - } + }; // Find all delegations containing the tag as a target. Object.keys(delegationSet.delegations).forEach((delegationName) => { var delegation = delegationSet.delegations[delegationName]; if (delegation.targets[tag.name]) { - var DelegationInfo = this.buildDelegationInfo(tag, delegationName, delegation) + var DelegationInfo = this.buildDelegationInfo(tag, delegationName, delegation); info.delegations.push(DelegationInfo); info.delegationsByName[delegationName] = DelegationInfo; @@ -173,4 +182,4 @@ export class TagSigningDisplayComponent { return 'invalid-signed'; } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/time-machine-settings/time-machine-settings.component.ts b/static/js/directives/ui/time-machine-settings/time-machine-settings.component.ts index 092ec6a16..5dfbaf29d 100644 --- a/static/js/directives/ui/time-machine-settings/time-machine-settings.component.ts +++ b/static/js/directives/ui/time-machine-settings/time-machine-settings.component.ts @@ -1,6 +1,7 @@ import { Input, Component, Inject } from 'ng-metadata/core'; import * as moment from "moment"; + /** * A component that displays settings for a namespace for time machine. 
*/ @@ -9,6 +10,7 @@ import * as moment from "moment"; templateUrl: '/static/js/directives/ui/time-machine-settings/time-machine-settings.component.html' }) export class TimeMachineSettingsComponent implements ng.IComponentController { + @Input('<') public user: any; @Input('<') public organization: any; @@ -16,7 +18,7 @@ export class TimeMachineSettingsComponent implements ng.IComponentController { private current_s: number; private updating: boolean; - constructor (@Inject('Config') private Config: any, @Inject('ApiService') private ApiService: any, + constructor(@Inject('Config') private Config: any, @Inject('ApiService') private ApiService: any, @Inject('Features') private Features: any) { this.current_s = 0; this.initial_s = 0; @@ -51,7 +53,7 @@ export class TimeMachineSettingsComponent implements ng.IComponentController { this.updating = true; var errorDisplay = this.ApiService.errorDisplay('Could not update time machine setting', () => { this.updating = false; - }) + }); var method = (this.user ? this.ApiService.changeUserDetails : this.ApiService.changeOrganizationDetails); diff --git a/static/js/directives/ui/typeahead/typeahead.directive.ts b/static/js/directives/ui/typeahead/typeahead.directive.ts index 8b494636c..487aac8eb 100644 --- a/static/js/directives/ui/typeahead/typeahead.directive.ts +++ b/static/js/directives/ui/typeahead/typeahead.directive.ts @@ -1,6 +1,7 @@ import { Input, Output, Directive, Inject, AfterContentInit, EventEmitter, HostListener } from 'ng-metadata/core'; import * as $ from 'jquery'; + /** * Directive which decorates an with a typeahead autocomplete. */ @@ -8,15 +9,15 @@ import * as $ from 'jquery'; selector: '[typeahead]', }) export class TypeaheadDirective implements AfterContentInit { - @Output('typeahead') typeahead = new EventEmitter(); - @Input('taDisplayKey') displayKey: string = ''; - @Input('taSuggestionTmpl') suggestionTemplate: string = ''; - @Input('taClearOnSelect') clearOnSelect: boolean = false; - @Input('taDebounce') debounce: number = 250; + @Input('taDisplayKey') public displayKey: string = ''; + @Input('taSuggestionTmpl') public suggestionTemplate: string = ''; + @Input('taClearOnSelect') public clearOnSelect: boolean = false; + @Input('taDebounce') public debounce: number = 250; - @Output('taSelected') selected = new EventEmitter(); - @Output('taEntered') entered = new EventEmitter(); + @Output('typeahead') public typeahead = new EventEmitter(); + @Output('taSelected') public selected = new EventEmitter(); + @Output('taEntered') public entered = new EventEmitter(); private itemSelected: boolean = false; private existingTimer: ng.IPromise = null; @@ -28,10 +29,25 @@ export class TypeaheadDirective implements AfterContentInit { @Inject('$timeout') private $timeout: ng.ITimeoutService) { } + @HostListener('keyup', ['$event']) + public onKeyup(event: JQueryKeyEventObject): void { + if (!this.itemSelected && event.keyCode == 13) { + this.entered.emit({ + 'value': $(this.$element).typeahead('val'), + 'callback': (reset: boolean) => { + if (reset) { + this.itemSelected = false; + $(this.$element).typeahead('val', ''); + } + } + }); + } + } + public ngAfterContentInit(): void { var templates = null; if (this.suggestionTemplate) { - templates = {} + templates = {}; if (this.suggestionTemplate) { templates['suggestion'] = this.buildTemplateHandler(this.suggestionTemplate); @@ -42,7 +58,7 @@ export class TypeaheadDirective implements AfterContentInit { if (this.clearOnSelect) { $(this.$element).typeahead('val', ''); } - 
this.selected.emit({'result': suggestion}) + this.selected.emit({'result': suggestion}); this.itemSelected = true; }); @@ -72,21 +88,6 @@ export class TypeaheadDirective implements AfterContentInit { }, this.debounce); } - @HostListener('keyup', ['$event']) - public onKeyup(event: JQueryKeyEventObject): void { - if (!this.itemSelected && event.keyCode == 13) { - this.entered.emit({ - 'value': $(this.$element).typeahead('val'), - 'callback': (reset: boolean) => { - if (reset) { - this.itemSelected = false; - $(this.$element).typeahead('val', ''); - } - } - }); - } - } - private buildTemplateHandler(templateUrl: string): Function { return (value) => { var resultDiv = document.createElement('div'); @@ -101,4 +102,4 @@ export class TypeaheadDirective implements AfterContentInit { return resultDiv; }; } -} \ No newline at end of file +} diff --git a/static/js/directives/ui/visibility-indicator/visibility-indicator.component.ts b/static/js/directives/ui/visibility-indicator/visibility-indicator.component.ts index b5fb32940..fdcf2d4e4 100644 --- a/static/js/directives/ui/visibility-indicator/visibility-indicator.component.ts +++ b/static/js/directives/ui/visibility-indicator/visibility-indicator.component.ts @@ -10,9 +10,6 @@ import { Input, Component } from 'ng-metadata/core'; templateUrl: '/static/js/directives/ui/visibility-indicator/visibility-indicator.component.html' }) export class VisibilityIndicatorComponent { + @Input('<') public repository: any; - - constructor() { - - } } diff --git a/static/js/quay-config.module.ts b/static/js/quay-config.module.ts index b58334937..171f2e0b9 100644 --- a/static/js/quay-config.module.ts +++ b/static/js/quay-config.module.ts @@ -84,7 +84,8 @@ function provideConfig($provide: ng.auto.IProvideService, var tooltipFactory: any = $tooltipProvider.$get[$tooltipProvider.$get.length - 1]; $tooltipProvider.$get[$tooltipProvider.$get.length - 1] = function($window: ng.IWindowService) { if ('ontouchstart' in $window) { - var existing: any = tooltipFactory.apply(this, arguments); + const existing: any = tooltipFactory.apply(this, arguments); + return function(element) { // Note: We only disable bs-tooltip's themselves. $tooltip is used for other things // (such as the datepicker), so we need to be specific when canceling it. 
diff --git a/static/js/quay-run.ts b/static/js/quay-run.ts index 3001f97c1..561a2cca9 100644 --- a/static/js/quay-run.ts +++ b/static/js/quay-run.ts @@ -45,7 +45,8 @@ export function provideRun($rootScope: QuayRunScope, return true; } - const invalid_token: boolean = response.data['title'] == 'invalid_token' || response.data['error_type'] == 'invalid_token'; + const invalid_token: boolean = response.data['title'] == 'invalid_token' || + response.data['error_type'] == 'invalid_token'; if (response !== undefined && response.status == 401 && invalid_token && @@ -92,7 +93,7 @@ export function provideRun($rootScope: QuayRunScope, } }); - $rootScope.$on('$routeChangeSuccess', function (event, current, previous) { + $rootScope.$on('$routeChangeSuccess', function(event, current, previous) { $rootScope.current = current.$$route; $rootScope.currentPage = current; $rootScope.pageClass = ''; @@ -126,7 +127,7 @@ interface QuayRunScope extends ng.IRootScopeService { currentPage: any; current: any; title: any; - description: string, + description: string; pageClass: any; newLayout: any; fixFooter: any; diff --git a/static/js/quay.module.ts b/static/js/quay.module.ts index c274af627..45c1837ce 100644 --- a/static/js/quay.module.ts +++ b/static/js/quay.module.ts @@ -13,7 +13,9 @@ import { CorTableComponent } from './directives/ui/cor-table/cor-table.component import { CorTableColumn } from './directives/ui/cor-table/cor-table-col.component'; import { ChannelIconComponent } from './directives/ui/channel-icon/channel-icon.component'; import { TagSigningDisplayComponent } from './directives/ui/tag-signing-display/tag-signing-display.component'; -import { RepositorySigningConfigComponent } from './directives/ui/repository-signing-config/repository-signing-config.component'; +import { + RepositorySigningConfigComponent +} from './directives/ui/repository-signing-config/repository-signing-config.component'; import { TimeMachineSettingsComponent } from './directives/ui/time-machine-settings/time-machine-settings.component'; import { DurationInputComponent } from './directives/ui/duration-input/duration-input.component'; import { SearchBoxComponent } from './directives/ui/search-box/search-box.component'; @@ -22,7 +24,6 @@ import { BuildServiceImpl } from './services/build/build.service.impl'; import { AvatarServiceImpl } from './services/avatar/avatar.service.impl'; import { DockerfileServiceImpl } from './services/dockerfile/dockerfile.service.impl'; import { DataFileServiceImpl } from './services/datafile/datafile.service.impl'; -import { UtilServiceImpl } from './services/util/util.service.impl'; import { QuayRequireDirective } from './directives/structural/quay-require/quay-require.directive'; import { MarkdownInputComponent } from './directives/ui/markdown/markdown-input.component'; import { MarkdownViewComponent } from './directives/ui/markdown/markdown-view.component'; diff --git a/static/js/services/avatar/avatar.service.impl.ts b/static/js/services/avatar/avatar.service.impl.ts index a9eb54d36..8cc66477f 100644 --- a/static/js/services/avatar/avatar.service.impl.ts +++ b/static/js/services/avatar/avatar.service.impl.ts @@ -47,4 +47,4 @@ export class AvatarServiceImpl implements AvatarService { return this.cache[cacheKey] = hash; } -} \ No newline at end of file +} diff --git a/static/js/services/avatar/avatar.service.ts b/static/js/services/avatar/avatar.service.ts index ec817e9fd..7abec8e08 100644 --- a/static/js/services/avatar/avatar.service.ts +++ b/static/js/services/avatar/avatar.service.ts @@ 
-19,4 +19,4 @@ export abstract class AvatarService { * @return hash The hash for the avatar image. */ public abstract computeHash(email?: string, name?: string): string; -} \ No newline at end of file +} diff --git a/static/js/services/build/build.service.impl.ts b/static/js/services/build/build.service.impl.ts index 6183ea1f7..5648efe28 100644 --- a/static/js/services/build/build.service.impl.ts +++ b/static/js/services/build/build.service.impl.ts @@ -73,7 +73,8 @@ export class BuildServiceImpl implements BuildService { break; case 'internalerror': - message = 'An internal system error occurred while building; the build will be retried in the next few minutes.'; + message = 'An internal system error occurred while building; ' + + 'the build will be retried in the next few minutes.'; break; case 'cancelled': @@ -86,4 +87,4 @@ export class BuildServiceImpl implements BuildService { return message; } -} \ No newline at end of file +} diff --git a/static/js/services/build/build.service.ts b/static/js/services/build/build.service.ts index d07f9d111..8cdabbfae 100644 --- a/static/js/services/build/build.service.ts +++ b/static/js/services/build/build.service.ts @@ -16,4 +16,4 @@ export abstract class BuildService { * @return buildMessage The message associated with the given phase. */ public abstract getBuildMessage(phase: string): string; -} \ No newline at end of file +} diff --git a/static/js/services/datafile/datafile.service.impl.ts b/static/js/services/datafile/datafile.service.impl.ts index f9bc83ed8..e1e9cfc6f 100644 --- a/static/js/services/datafile/datafile.service.impl.ts +++ b/static/js/services/datafile/datafile.service.impl.ts @@ -86,7 +86,7 @@ export class DataFileServiceImpl implements DataFileService { var zip = null; var zipFiles = null; try { - var zip = new JSZip(buf); + zip = new JSZip(buf); zipFiles = zip.files; } catch (e) { failure(); @@ -164,9 +164,9 @@ export class DataFileServiceImpl implements DataFileService { 'name': this.getName(path), 'path': path, 'canRead': true, - 'toBlob': (function(currentFile) { + 'toBlob': (function(file) { return function() { - return new Blob([currentFile.buffer], {type: 'application/octet-binary'}); + return new Blob([file.buffer], {type: 'application/octet-binary'}); }; }(currentFile)) }); @@ -179,4 +179,4 @@ export class DataFileServiceImpl implements DataFileService { failure(); } } -} \ No newline at end of file +} diff --git a/static/js/services/datafile/datafile.service.ts b/static/js/services/datafile/datafile.service.ts index 47c990d83..e79794694 100644 --- a/static/js/services/datafile/datafile.service.ts +++ b/static/js/services/datafile/datafile.service.ts @@ -45,4 +45,4 @@ export abstract class DataFileService { progress: (percent: number) => void, error: () => void, loaded: (uint8array: Uint8Array) => void): void; -} \ No newline at end of file +} diff --git a/static/js/services/dockerfile/dockerfile.service.impl.ts b/static/js/services/dockerfile/dockerfile.service.impl.ts index 37db16b52..9c065b33f 100644 --- a/static/js/services/dockerfile/dockerfile.service.impl.ts +++ b/static/js/services/dockerfile/dockerfile.service.impl.ts @@ -104,11 +104,11 @@ export class DockerfileInfoImpl implements DockerfileInfo { return null; } - if (baseImage.indexOf(this.config.getDomain() + '/') != 0) { + if (baseImage.indexOf(`${this.config.getDomain()}/`) != 0) { return null; } - return baseImage.substring(this.config.getDomain().length + 1); + return baseImage.substring(this.config.getDomain().length + 1); } public getBaseImage(): 
string | null { @@ -152,4 +152,4 @@ export class DockerfileInfoImpl implements DockerfileInfo { return baseImageAndTag; } -} \ No newline at end of file +} diff --git a/static/js/services/dockerfile/dockerfile.service.ts b/static/js/services/dockerfile/dockerfile.service.ts index 3c5186b6b..9f5648d11 100644 --- a/static/js/services/dockerfile/dockerfile.service.ts +++ b/static/js/services/dockerfile/dockerfile.service.ts @@ -35,4 +35,4 @@ export abstract class DockerfileInfo { * @return baseImageAndTag The base image and tag. */ public abstract getBaseImageAndTag(): string | null; -} \ No newline at end of file +} diff --git a/static/js/services/page/page.service.impl.ts b/static/js/services/page/page.service.impl.ts index a018bf6ed..4bfd77cbf 100644 --- a/static/js/services/page/page.service.impl.ts +++ b/static/js/services/page/page.service.impl.ts @@ -1,15 +1,11 @@ import { Injectable } from 'ng-metadata/core'; -import { PageService } from './page.service'; +import { PageService, QuayPage, QuayPageProfile } from './page.service'; @Injectable(PageService.name) export class PageServiceImpl implements ng.IServiceProvider { - private pages: any = {}; - - constructor() { - - } + private pages: {[pageName: string]: QuayPage} = {}; public create(pageName: string, templateName: string, @@ -26,8 +22,8 @@ export class PageServiceImpl implements ng.IServiceProvider { } } - public get(pageName: string, profiles: any[]): any[] | null { - for (var i = 0; i < profiles.length; ++i) { + public get(pageName: string, profiles: QuayPageProfile[]): [QuayPageProfile, QuayPage] | null { + for (let i = 0; i < profiles.length; ++i) { var current = profiles[i]; var key = current.id + ':' + pageName; var page = this.pages[key]; diff --git a/static/js/services/page/page.service.ts b/static/js/services/page/page.service.ts index 829959d2a..768453a49 100644 --- a/static/js/services/page/page.service.ts +++ b/static/js/services/page/page.service.ts @@ -22,7 +22,7 @@ export abstract class PageService implements ng.IServiceProvider { * @param pageName The name of the page. * @param profiles Available profiles to search. */ - public abstract get(pageName: string, profiles: any[]): any[] | null; + public abstract get(pageName: string, profiles: QuayPageProfile[]): [QuayPageProfile, QuayPage] | null; /** * Provide the service instance. @@ -30,3 +30,24 @@ export abstract class PageService implements ng.IServiceProvider { */ public abstract $get(): PageService; } + + +/** + * A type representing a registered application page. + */ +export type QuayPage = { + name: string; + controller: ng.IController; + templateName: string, + flags: {[key: string]: any}; +}; + + +/** + * Represents a page profile type. 
+ */ +export type QuayPageProfile = { + id: string; + templatePath: string; +}; + diff --git a/static/js/services/route-builder/route-builder.service.impl.ts b/static/js/services/route-builder/route-builder.service.impl.ts index 901dd7c15..014cbce31 100644 --- a/static/js/services/route-builder/route-builder.service.impl.ts +++ b/static/js/services/route-builder/route-builder.service.impl.ts @@ -1,13 +1,13 @@ import { RouteBuilder } from './route-builder.service'; import { Injectable, Inject } from 'ng-metadata/core'; -import { PageService } from '../page/page.service'; +import { PageService, QuayPage, QuayPageProfile } from '../page/page.service'; @Injectable(RouteBuilder.name) export class RouteBuilderImpl implements RouteBuilder { public currentProfile: string = 'layout'; - public profiles: any[] = [ + public profiles: QuayPageProfile[] = [ // Start with the old pages (if we asked for it). {id: 'old-layout', templatePath: '/static/partials/'}, // Fallback back combined new/existing pages. @@ -50,4 +50,4 @@ export class RouteBuilderImpl implements RouteBuilder { return this; } -} \ No newline at end of file +} diff --git a/static/js/services/route-builder/route-builder.service.ts b/static/js/services/route-builder/route-builder.service.ts index 0d99ab075..3b6a43c24 100644 --- a/static/js/services/route-builder/route-builder.service.ts +++ b/static/js/services/route-builder/route-builder.service.ts @@ -15,4 +15,4 @@ export abstract class RouteBuilder { * @param pagename The name of the page to associate with this route. */ public abstract route(path: string, pagename: string): RouteBuilder; -} \ No newline at end of file +} diff --git a/static/js/services/util/util.service.impl.spec.ts b/static/js/services/util/util.service.impl.spec.ts deleted file mode 100644 index 92748be13..000000000 --- a/static/js/services/util/util.service.impl.spec.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { UtilServiceImpl } from './util.service.impl'; - - -describe("UtilServiceImpl", () => { - var utilServiceImpl: UtilServiceImpl; - var $sanitizeMock: ng.sanitize.ISanitizeService; - - beforeEach(() => { - $sanitizeMock = jasmine.createSpy('$sanitizeSpy').and.returnValue(""); - utilServiceImpl = new UtilServiceImpl($sanitizeMock); - }); - - describe("isAdBlockEnabled", () => { - // TODO - }); - - describe("isEmailAddress", () => { - // TODO - }); - - describe("getMarkedDown", () => { - // TODO - }); - - describe("getFirstMarkdownLineAsText", () => { - // TODO - }); - - describe("escapeHtmlString", () => { - // TODO - }); - - describe("getRestUrl", () => { - // TODO - }); - - describe("textToSafeHtml", () => { - // TODO - }); -}); diff --git a/static/js/services/util/util.service.impl.ts b/static/js/services/util/util.service.impl.ts deleted file mode 100644 index 390b5913c..000000000 --- a/static/js/services/util/util.service.impl.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { Injectable, Inject } from 'ng-metadata/core'; -import { UtilService } from './util.service'; - - -@Injectable(UtilService.name) -export class UtilServiceImpl implements UtilService { - - constructor(@Inject('$sanitize') private $sanitize: ng.sanitize.ISanitizeService) { - - } - - public isAdBlockEnabled(callback: (isEnabled: boolean) => void): void { - - } - - public isEmailAddress(str: string): boolean { - return null; - } - - public getMarkedDown(str: string): string { - return null; - } - - public getFirstMarkdownLineAsText(commentString: string, placeholderNeeded: boolean): string { - return null; - } - - public escapeHtmlString(text: 
string): string { - return null; - } - - public getRestUrl(args: any[]): string { - return null; - } - - public textToSafeHtml(text: string): string { - return null; - } -} \ No newline at end of file diff --git a/static/js/services/util/util.service.ts b/static/js/services/util/util.service.ts deleted file mode 100644 index 688421a40..000000000 --- a/static/js/services/util/util.service.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Service which exposes various utility methods. - */ -export abstract class UtilService { - - public abstract isAdBlockEnabled(callback: (isEnabled: boolean) => void): void; - - public abstract isEmailAddress(str: string): boolean; - - public abstract getMarkedDown(str: string): string; - - public abstract getFirstMarkdownLineAsText(commentString: string, placeholderNeeded: boolean): string; - - public abstract escapeHtmlString(text: string): string; - - public abstract getRestUrl(args: any[]): string; - - public abstract textToSafeHtml(text: string): string; -} diff --git a/static/js/services/view-array/view-array.impl.ts b/static/js/services/view-array/view-array.impl.ts index 60032dab4..b03f27bf6 100644 --- a/static/js/services/view-array/view-array.impl.ts +++ b/static/js/services/view-array/view-array.impl.ts @@ -93,4 +93,4 @@ export class ViewArrayImpl implements ViewArray { this.timerRef = null; } } -} \ No newline at end of file +} diff --git a/static/js/services/view-array/view-array.ts b/static/js/services/view-array/view-array.ts index 4b7abbd35..ca30436d7 100644 --- a/static/js/services/view-array/view-array.ts +++ b/static/js/services/view-array/view-array.ts @@ -68,4 +68,4 @@ export abstract class ViewArray { * @return viewArray New ViewArray instance. */ public abstract create(): ViewArrayImpl; -} \ No newline at end of file +} diff --git a/tsconfig.json b/tsconfig.json index 58312dc96..605cfdeed 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,22 +1,17 @@ { "compilerOptions": { "baseUrl": ".", - "jsx": "react", "module": "commonjs", "outDir": "./build/", "target": "es5", "lib": ["es2017", "dom"], "experimentalDecorators": true, - "sourceMap": true, - "paths": { - "sass/*": ["./static/css/directives/components/pages/*"] - } + "sourceMap": true }, "exclude": [ "node_modules" ], "include": [ - "./static/js/**/*.tsx", "./static/js/**/*.ts" ] } diff --git a/tslint.json b/tslint.json index 10a4b36d3..c183826ba 100644 --- a/tslint.json +++ b/tslint.json @@ -1,5 +1,29 @@ { - "rules": { - "no-default-export": true + "rules": { + "no-default-export": true, + "member-access": true, + "member-ordering": [true, {"order": "fields-first"}], + "no-empty-interface": true, + "no-namespace": true, + "no-reference": true, + "curly": true, + "no-conditional-assignment": true, + "no-duplicate-super": true, + "no-empty": true, + "no-invalid-template-strings": true, + "no-misused-new": true, + "no-shadowed-variable": true, + "no-unbound-method": true, + "restrict-plus-operands": true, + "eofline": true, + "indent": [true, "spaces", 2], + "max-line-length": [true, 120], + "class-name": true, + "import-spacing": true, + "align": true, + "new-parens": true, + "semicolon": true, + "space-before-function-paren": [true, "never"], + "whitespace": [true, "check-decl", "check-operator", "check-module", "check-separator", "check-type", "check-preblock"] } -} \ No newline at end of file +} diff --git a/typings.json b/typings.json deleted file mode 100644 index b01c490f5..000000000 --- a/typings.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "globalDependencies": { - "react": 
"registry:dt/react#0.14.0+20160927082313", - "react-dom": "registry:dt/react-dom#0.14.0+20160412154040" - } -} diff --git a/webpack.config.js b/webpack.config.js index 5573f4d04..a179c1ceb 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -21,7 +21,7 @@ var config = { module: { rules: [ { - test: /\.tsx?$/, + test: /\.ts?$/, use: ["ts-loader"], exclude: /node_modules/ }, diff --git a/yarn.lock b/yarn.lock index 61c278bb0..f442cf385 100644 --- a/yarn.lock +++ b/yarn.lock @@ -153,22 +153,10 @@ angular-sanitize@1.6.2: version "1.6.2" resolved "https://registry.yarnpkg.com/angular-sanitize/-/angular-sanitize-1.6.2.tgz#8a327c1acb2c14f50da5b5cad5ea452750a1a375" -angular-ts-decorators@0.0.19: - version "0.0.19" - resolved "https://registry.yarnpkg.com/angular-ts-decorators/-/angular-ts-decorators-0.0.19.tgz#071f6a4f791fe661a91cf3b1925e02fe593076ef" - dependencies: - reflect-metadata "^0.1.8" - angular@1.6.2: version "1.6.2" resolved "https://registry.yarnpkg.com/angular/-/angular-1.6.2.tgz#d0b677242ac4bf9ae81424297c6320973af4bb5a" -ansi-align@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-1.1.0.tgz#2f0c1658829739add5ebb15e6b0c6e3423f016ba" - dependencies: - string-width "^1.0.1" - ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" @@ -177,7 +165,7 @@ ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" -any-promise@^1.0.0, any-promise@^1.1.0, any-promise@^1.3.0: +any-promise@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" @@ -192,10 +180,6 @@ aproba@^1.0.3: version "1.1.1" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.1.tgz#95d3600f07710aa0e9298c726ad5ecf2eacbabab" -archy@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" - are-we-there-yet@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.2.tgz#80e470e95a084794fe1899262c5667c6e88de1b3" @@ -233,7 +217,7 @@ array-union@^1.0.1: dependencies: array-uniq "^1.0.1" -array-uniq@^1.0.1, array-uniq@^1.0.2: +array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" @@ -324,7 +308,7 @@ aws4@^1.2.1: version "1.6.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" -babel-code-frame@^6.11.0: +babel-code-frame@^6.11.0, babel-code-frame@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4" dependencies: @@ -340,6 +324,10 @@ balanced-match@^0.4.1, balanced-match@^0.4.2: version "0.4.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838" +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" @@ -388,7 +376,7 @@ blocking-proxy@0.0.5: dependencies: minimist "^1.2.0" -bluebird@^3.1.1, 
bluebird@^3.3.0: +bluebird@^3.3.0: version "3.4.7" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3" @@ -431,20 +419,6 @@ bootstrap@^3.3, bootstrap@^3.3.2: version "3.3.7" resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.3.7.tgz#5a389394549f23330875a3b150656574f8a9eb71" -boxen@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/boxen/-/boxen-0.6.0.tgz#8364d4248ac34ff0ef1b2f2bf49a6c60ce0d81b6" - dependencies: - ansi-align "^1.1.0" - camelcase "^2.1.0" - chalk "^1.1.1" - cli-boxes "^1.0.0" - filled-array "^1.0.0" - object-assign "^4.0.1" - repeating "^2.0.0" - string-width "^1.0.1" - widest-line "^1.0.0" - brace-expansion@^1.0.0: version "1.1.6" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.6.tgz#7197d7eaa9b87e648390ea61fc66c84427420df9" @@ -452,6 +426,13 @@ brace-expansion@^1.0.0: balanced-match "^0.4.1" concat-map "0.0.1" +brace-expansion@^1.1.7: + version "1.1.8" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + braces@^0.1.2: version "0.1.5" resolved "https://registry.yarnpkg.com/braces/-/braces-0.1.5.tgz#c085711085291d8b75fdd74eab0f8597280711e6" @@ -584,7 +565,7 @@ camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" -camelcase@^2.0.0, camelcase@^2.1.0: +camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" @@ -605,10 +586,6 @@ caniuse-db@^1.0.30000346, caniuse-db@^1.0.30000624: version "1.0.30000628" resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000628.tgz#3d010e2a8e2537a8d135792e90e4f2ce0eb838cc" -capture-stack-trace@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.0.tgz#4a6fa07399c26bba47f0b2496b4d0fb408c5550d" - caseless@~0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.11.0.tgz#715b96ea9841593cc33067923f5ec60ebda4f7d7" @@ -620,7 +597,7 @@ center-align@^0.1.1: align-text "^0.1.3" lazy-cache "^1.0.3" -chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: +chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" dependencies: @@ -663,10 +640,6 @@ clean-css@4.1.x: dependencies: source-map "0.5.x" -cli-boxes@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" - clipboard@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-1.6.1.tgz#65c5b654812466b0faab82dc6ba0f1d2f8e4be53" @@ -741,17 +714,10 @@ colormin@^1.0.5: css-color-names "0.0.4" has "^1.0.1" -colors@^1.0.3, colors@^1.1.0, colors@~1.1.2: +colors@^1.0.3, colors@^1.1.0, colors@^1.1.2, colors@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" -columnify@^1.5.2: - version "1.5.4" - resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.5.4.tgz#4737ddf1c7b69a8a7c340570782e947eec8e78bb" - dependencies: - strip-ansi "^3.0.0" - wcwidth "^1.0.0" - combine-lists@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/combine-lists/-/combine-lists-1.0.1.tgz#458c07e09e0d900fc28b70a3fec2dacd1d2cb7f6" @@ -790,28 +756,6 @@ concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" -concat-stream@^1.4.7: - version "1.5.0" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.5.0.tgz#53f7d43c51c5e43f81c8fdd03321c631be68d611" - dependencies: - inherits "~2.0.1" - readable-stream "~2.0.0" - typedarray "~0.0.5" - -configstore@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/configstore/-/configstore-2.1.0.tgz#737a3a7036e9886102aa6099e47bb33ab1aba1a1" - dependencies: - dot-prop "^3.0.0" - graceful-fs "^4.1.2" - mkdirp "^0.5.0" - object-assign "^4.0.1" - os-tmpdir "^1.0.0" - osenv "^0.1.0" - uuid "^2.0.1" - write-file-atomic "^1.1.2" - xdg-basedir "^2.0.0" - connect@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/connect/-/connect-3.6.2.tgz#694e8d20681bfe490282c8ab886be98f09f42fe7" @@ -858,12 +802,6 @@ create-ecdh@^4.0.0: bn.js "^4.1.0" elliptic "^6.0.0" -create-error-class@^3.0.1: - version "3.0.2" - resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" - dependencies: - capture-stack-trace "^1.0.0" - create-hash@^1.1.0, create-hash@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.2.tgz#51210062d7bb7479f6c65bb41a92208b1d61abad" @@ -1009,7 +947,7 @@ dateformat@^1.0.6: get-stdin "^4.0.1" meow "^3.3.0" -debug@2, debug@2.6.7, debug@^2.2.0: +debug@2, debug@2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.7.tgz#92bad1f6d05bbb6bba22cca88bcd0ec894c2861e" dependencies: @@ -1039,12 +977,6 @@ deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" -defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" - dependencies: - clone "^1.0.2" - defined@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" @@ -1084,17 +1016,11 @@ des.js@^1.0.0: inherits "^2.0.1" minimalistic-assert "^1.0.0" -detect-indent@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" - dependencies: - repeating "^2.0.0" - di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" -diff@^3.1.0: +diff@^3.1.0, diff@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" @@ -1119,18 +1045,6 @@ domain-browser@^1.1.1: version "1.1.7" resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.1.7.tgz#867aa4b093faa05f1de08c06f4d7b21fdf8698bc" -dot-prop@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-3.0.0.tgz#1b708af094a49c9a0e7dbcad790aba539dac1177" - dependencies: - is-obj "^1.0.0" - -duplexer2@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1" - dependencies: - readable-stream "^2.0.2" - ecc-jsbn@~0.1.1: version "0.1.1" resolved 
"https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" @@ -1381,10 +1295,6 @@ fill-range@^2.1.0: repeat-element "^1.1.2" repeat-string "^1.5.2" -filled-array@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/filled-array/-/filled-array-1.1.0.tgz#c3c4f6c663b923459a9aa29912d2d031f1507f84" - finalhandler@1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.3.tgz#ef47e77950e999780e86022a560e3217e0d0cc89" @@ -1422,7 +1332,7 @@ forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" -form-data@^2.0.0, form-data@~2.1.1: +form-data@~2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.2.tgz#89c3534008b97eada4cbb157d58f6f5df025eae4" dependencies: @@ -1556,27 +1466,7 @@ good-listener@^1.2.0: dependencies: delegate "^3.1.2" -got@^5.0.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/got/-/got-5.7.1.tgz#5f81635a61e4a6589f180569ea4e381680a51f35" - dependencies: - create-error-class "^3.0.1" - duplexer2 "^0.1.4" - is-redirect "^1.0.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - lowercase-keys "^1.0.0" - node-status-codes "^1.0.0" - object-assign "^4.0.1" - parse-json "^2.1.0" - pinkie-promise "^2.0.0" - read-all-stream "^3.0.0" - readable-stream "^2.0.5" - timed-out "^3.0.0" - unzip-response "^1.0.2" - url-parse-lax "^1.0.0" - -graceful-fs@^4.1.11, graceful-fs@^4.1.2: +graceful-fs@^4.1.2: version "4.1.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" @@ -1623,7 +1513,7 @@ has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" -has-unicode@^2.0.0, has-unicode@^2.0.1: +has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" @@ -1708,14 +1598,6 @@ http-errors@~1.6.1: setprototypeof "1.0.3" statuses ">= 1.3.1 < 2" -http-proxy-agent@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-1.0.0.tgz#cc1ce38e453bf984a0f7702d2dd59c73d081284a" - dependencies: - agent-base "2" - debug "2" - extend "3" - http-proxy@^1.13.0: version "1.16.2" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.16.2.tgz#06dff292952bf64dbe8471fa9df73066d4f37742" @@ -1755,10 +1637,6 @@ ieee754@^1.1.4: version "1.1.8" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4" -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" @@ -1796,12 +1674,6 @@ interpret@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.1.tgz#d579fb7f693b858004947af39fa0db49f795602c" -invariant@^2.2.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" - dependencies: - loose-envify "^1.0.0" - invert-kv@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" @@ -1810,13 +1682,6 @@ 
is-absolute-url@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" -is-absolute@^0.2.3: - version "0.2.6" - resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-0.2.6.tgz#20de69f3db942ef2d87b9c2da36f172235b1b5eb" - dependencies: - is-relative "^0.2.1" - is-windows "^0.2.0" - is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -1882,10 +1747,6 @@ is-my-json-valid@^2.12.4: jsonpointer "^4.0.0" xtend "^4.0.0" -is-npm@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" - is-number@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-number/-/is-number-0.1.1.tgz#69a7af116963d47206ec9bd9b48a14216f1e3806" @@ -1896,10 +1757,6 @@ is-number@^2.0.2, is-number@^2.1.0: dependencies: kind-of "^3.0.2" -is-obj@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - is-path-cwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" @@ -1932,24 +1789,6 @@ is-property@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" -is-redirect@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" - -is-relative@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-0.2.1.tgz#d27f4c7d516d175fb610db84bbeef23c3bc97aa5" - dependencies: - is-unc-path "^0.1.1" - -is-retry-allowed@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" - -is-stream@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - is-svg@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-2.1.0.tgz#cf61090da0d9efbcab8722deba6f032208dbb0e9" @@ -1960,20 +1799,10 @@ is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" -is-unc-path@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-0.1.2.tgz#6ab053a72573c10250ff416a3814c35178af39b9" - dependencies: - unc-path-regex "^0.1.0" - is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" -is-windows@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c" - isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" @@ -1990,7 +1819,7 @@ isexe@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/isexe/-/isexe-1.1.2.tgz#36f3e22e60750920f5e7241a476a8c6a42275ad0" -isobject@^2.0.0, isobject@^2.1.0: +isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" dependencies: @@ -2200,20 +2029,10 @@ kind-of@^3.0.2: dependencies: is-buffer "^1.0.2" 
-latest-version@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-2.0.0.tgz#56f8d6139620847b8017f8f1f4d78e211324168b" - dependencies: - package-json "^2.0.0" - lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" -lazy-req@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/lazy-req/-/lazy-req-1.1.0.tgz#bdaebead30f8d824039ce0ce149d4daa07ba1fac" - lcid@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" @@ -2227,10 +2046,6 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -listify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/listify/-/listify-1.0.0.tgz#03ca7ba2d150d4267773f74e57558d1053d2bee3" - load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" @@ -2262,10 +2077,6 @@ loader-utils@^1.0.2: emojis-list "^2.0.0" json5 "^0.5.0" -lockfile@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.3.tgz#2638fc39a0331e9cac1a04b71799931c9c50df79" - lodash._createcompounder@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lodash._createcompounder/-/lodash._createcompounder-3.0.0.tgz#5dd2cb55372d6e70e0e2392fb2304d6631091075" @@ -2322,12 +2133,6 @@ longest@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" -loose-envify@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" - dependencies: - js-tokens "^3.0.0" - loud-rejection@^1.0.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" @@ -2339,10 +2144,6 @@ lower-case@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" -lowercase-keys@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" - lru-cache@2.2.x: version "2.2.4" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.2.4.tgz#6c658619becf14031d0d0b594b16042ce4dc063d" @@ -2351,13 +2152,7 @@ macaddress@^0.2.8: version "0.2.8" resolved "https://registry.yarnpkg.com/macaddress/-/macaddress-0.2.8.tgz#5904dc537c39ec6dbefeae902327135fa8511f12" -make-error-cause@^1.2.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/make-error-cause/-/make-error-cause-1.2.2.tgz#df0388fcd0b37816dff0a5fb8108939777dcbc9d" - dependencies: - make-error "^1.2.0" - -make-error@^1.1.1, make-error@^1.2.0: +make-error@^1.1.1: version "1.2.1" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.2.1.tgz#9a6dfb4844423b9f145806728d05c6e935670e75" @@ -2452,6 +2247,12 @@ minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: dependencies: brace-expansion "^1.0.0" +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + dependencies: + brace-expansion "^1.1.7" + minimist@0.0.8, minimist@~0.0.1: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" @@ -2460,7 +2261,7 @@ 
minimist@^1.1.3, minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" -mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1: +mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" dependencies: @@ -2561,22 +2362,12 @@ node-pre-gyp@^0.6.29: tar "~2.2.1" tar-pack "~3.3.0" -node-status-codes@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/node-status-codes/-/node-status-codes-1.0.0.tgz#5ae5541d024645d32a58fcddc9ceecea7ae3ac2f" - nopt@3.x, nopt@~3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" dependencies: abbrev "1" -nopt@~1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" - dependencies: - abbrev "1" - normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: version "2.3.5" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.5.tgz#8d924f142960e1777e7ffe170543631cc7cb02df" @@ -2647,12 +2438,6 @@ object.omit@^2.0.0: for-own "^0.1.4" is-extendable "^0.1.1" -object.pick@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.2.0.tgz#b5392bee9782da6d9fb7d6afaf539779f1234c2b" - dependencies: - isobject "^2.1.0" - on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" @@ -2697,36 +2482,16 @@ os-browserify@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.2.1.tgz#63fc4ccee5d2d7763d26bbf8601078e6c2e0044f" -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - os-locale@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" dependencies: lcid "^1.0.0" -os-tmpdir@^1.0.0, os-tmpdir@~1.0.1: +os-tmpdir@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" -osenv@^0.1.0: - version "0.1.4" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -package-json@^2.0.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-2.4.0.tgz#0d15bd67d1cbbddbb2ca222ff2edb86bcb31a8bb" - dependencies: - got "^5.0.0" - registry-auth-token "^3.0.1" - registry-url "^3.0.3" - semver "^5.1.0" - pako@~0.2.0: version "0.2.9" resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" @@ -2756,7 +2521,7 @@ parse-glob@^3.0.4: is-extglob "^1.0.0" is-glob "^2.0.0" -parse-json@^2.1.0, parse-json@^2.2.0: +parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" dependencies: @@ -2802,6 +2567,10 @@ path-is-inside@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" +path-parse@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" + path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" @@ -2830,41 +2599,6 @@ pinkie@^2.0.0, pinkie@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" -popsicle-proxy-agent@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/popsicle-proxy-agent/-/popsicle-proxy-agent-3.0.0.tgz#b9133c55d945759ab7ee61b7711364620d3aeadc" - dependencies: - http-proxy-agent "^1.0.0" - https-proxy-agent "^1.0.0" - -popsicle-retry@^3.2.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/popsicle-retry/-/popsicle-retry-3.2.1.tgz#e06e866533b42a7a123eb330cbe63a7cebcba10c" - dependencies: - any-promise "^1.1.0" - xtend "^4.0.1" - -popsicle-rewrite@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/popsicle-rewrite/-/popsicle-rewrite-1.0.0.tgz#1dd4e8ea9c3182351fb820f87934d992f7fb9007" - -popsicle-status@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/popsicle-status/-/popsicle-status-2.0.0.tgz#54e12722376efba0a353abdf53cbf1ce0e852efa" - -popsicle@^8.0.2: - version "8.2.0" - resolved "https://registry.yarnpkg.com/popsicle/-/popsicle-8.2.0.tgz#ff4401005cab43a9418a91410611c00197712d21" - dependencies: - any-promise "^1.3.0" - arrify "^1.0.0" - concat-stream "^1.4.7" - form-data "^2.0.0" - make-error-cause "^1.2.1" - throwback "^1.1.0" - tough-cookie "^2.0.0" - xtend "^4.0.0" - postcss-calc@^5.2.0: version "5.3.1" resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-5.3.1.tgz#77bae7ca928ad85716e2fda42f261bf7c1d65b5e" @@ -3107,7 +2841,7 @@ prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" -prepend-http@^1.0.0, prepend-http@^1.0.1: +prepend-http@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" @@ -3127,12 +2861,6 @@ process@^0.11.0: version "0.11.9" resolved "https://registry.yarnpkg.com/process/-/process-0.11.9.tgz#7bd5ad21aa6253e7da8682264f1e11d11c0318c1" -promise-finally@^2.0.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/promise-finally/-/promise-finally-2.2.1.tgz#22616c4ba902916e988bd46c54d7caa08910cd77" - dependencies: - any-promise "^1.3.0" - protractor@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/protractor/-/protractor-5.1.2.tgz#9b221741709a4c62d5cd53c6aadd54a71137e95f" @@ -3239,7 +2967,7 @@ raw-loader@~0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-0.5.1.tgz#0c3d0beaed8a01c966d9787bf778281252a979aa" -rc@^1.0.1, rc@^1.1.5, rc@^1.1.6, rc@~1.1.6: +rc@~1.1.6: version "1.1.7" resolved "https://registry.yarnpkg.com/rc/-/rc-1.1.7.tgz#c5ea564bb07aff9fd3a5b32e906c1d3a65940fea" dependencies: @@ -3248,13 +2976,6 @@ rc@^1.0.1, rc@^1.1.5, rc@^1.1.6, rc@~1.1.6: minimist "^1.2.0" strip-json-comments "~2.0.1" -read-all-stream@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/read-all-stream/-/read-all-stream-3.1.0.tgz#35c3e177f2078ef789ee4bfafa4373074eaef4fa" - dependencies: - pinkie-promise "^2.0.0" - readable-stream "^2.0.0" - read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" @@ -3270,7 +2991,7 @@ 
read-pkg@^1.0.0: normalize-package-data "^2.3.2" path-type "^1.0.0" -readable-stream@^2.0.0, "readable-stream@^2.0.0 || ^1.1.13", readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.0: +"readable-stream@^2.0.0 || ^1.1.13", readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.0: version "2.2.3" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.3.tgz#9cf49463985df016c8ae8813097a9293a9b33729" dependencies: @@ -3291,17 +3012,6 @@ readable-stream@~1.0.2: isarray "0.0.1" string_decoder "~0.10.x" -readable-stream@~2.0.0: - version "2.0.6" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.0.6.tgz#8f90341e68a53ccc928788dacfcd11b36eb9b78e" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "~1.0.0" - process-nextick-args "~1.0.6" - string_decoder "~0.10.x" - util-deprecate "~1.0.1" - readable-stream@~2.1.4: version "2.1.5" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.1.5.tgz#66fa8b720e1438b364681f2ad1a63c618448c9d0" @@ -3353,10 +3063,6 @@ reduce-function-call@^1.0.1: dependencies: balanced-match "^0.4.2" -reflect-metadata@^0.1.8: - version "0.1.10" - resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.10.tgz#b4f83704416acad89988c9b15635d47e03b9344a" - regenerate@^1.2.1: version "1.3.2" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.2.tgz#d1941c67bad437e1be76433add5b385f95b19260" @@ -3376,18 +3082,6 @@ regexpu-core@^1.0.0: regjsgen "^0.2.0" regjsparser "^0.1.4" -registry-auth-token@^3.0.1: - version "3.1.0" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.1.0.tgz#997c08256e0c7999837b90e944db39d8a790276b" - dependencies: - rc "^1.1.6" - -registry-url@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" - dependencies: - rc "^1.0.1" - regjsgen@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" @@ -3461,6 +3155,12 @@ resolve@1.1.x: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" +resolve@^1.3.2: + version "1.3.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5" + dependencies: + path-parse "^1.0.5" + restangular@^1.2.0: version "1.6.1" resolved "https://registry.yarnpkg.com/restangular/-/restangular-1.6.1.tgz#add1743f27d77b0b8a652e6485a760b6234ed024" @@ -3473,7 +3173,7 @@ right-align@^0.1.1: dependencies: align-text "^0.1.1" -rimraf@2, rimraf@^2.2.8, rimraf@^2.4.4, rimraf@^2.5.2, rimraf@^2.5.4, rimraf@^2.6.0: +rimraf@2, rimraf@^2.2.8, rimraf@^2.5.2, rimraf@^2.5.4, rimraf@^2.6.0: version "2.6.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" dependencies: @@ -3542,13 +3242,7 @@ selenium-webdriver@^2.53.2: ws "^1.0.1" xml2js "0.4.4" -semver-diff@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" - dependencies: - semver "^5.0.3" - -"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@~5.3.0: +"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.3.0, semver@~5.3.0: version "5.3.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" @@ -3598,10 +3292,6 @@ signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" -slide@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" - sntp@1.x.x: version "1.0.9" resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" @@ -3756,10 +3446,6 @@ strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" -string-template@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" - string-width@^1.0.1, string-width@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" @@ -3863,30 +3549,10 @@ tar@~2.2.1: fstream "^1.0.2" inherits "2" -thenify@^3.1.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.2.1.tgz#251fd1c80aff6e5cf57cb179ab1fcb724269bd11" - dependencies: - any-promise "^1.0.0" - -throat@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/throat/-/throat-3.0.0.tgz#e7c64c867cbb3845f10877642f7b60055b8ec0d6" - through@~2.3.6: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" -throwback@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/throwback/-/throwback-1.1.1.tgz#f007e7c17604a6d16d7a07c41aa0e8fedc6184a4" - dependencies: - any-promise "^1.3.0" - -timed-out@^3.0.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-3.1.3.tgz#95860bfcc5c76c277f8f8326fd0f5b2e20eba217" - timers-browserify@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.2.tgz#ab4883cf597dcd50af211349a00fbca56ac86b86" @@ -3921,13 +3587,7 @@ to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" -touch@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/touch/-/touch-1.0.0.tgz#449cbe2dbae5a8c8038e30d71fa0ff464947c4de" - dependencies: - nopt "~1.0.10" - -tough-cookie@^2.0.0, tough-cookie@~2.3.0: +tough-cookie@~2.3.0: version "2.3.2" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" dependencies: @@ -4000,6 +3660,29 @@ tsconfig@^6.0.0: strip-bom "^3.0.0" strip-json-comments "^2.0.0" +tslib@^1.7.1: + version "1.7.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.7.1.tgz#bc8004164691923a79fe8378bbeb3da2017538ec" + +tslint@^5.4.3: + version "5.4.3" + resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.4.3.tgz#761c8402b80e347b7733a04390a757b253580467" + dependencies: + babel-code-frame "^6.22.0" + colors "^1.1.2" + commander "^2.9.0" + diff "^3.2.0" + glob "^7.1.1" + minimatch "^3.0.4" + resolve "^1.3.2" + semver "^5.3.0" + tslib "^1.7.1" + tsutils "^2.3.0" + +tsutils@^2.3.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.4.0.tgz#ad4ce6dba0e5a3edbddf8626b7ca040782189fea" + tty-browserify@0.0.0: version "0.0.0" resolved 
"https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" @@ -4025,68 +3708,10 @@ type-is@~1.6.15: media-typer "0.3.0" mime-types "~2.1.15" -typedarray@~0.0.5: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - -typescript@>=2.1.4, typescript@^2.0.0, typescript@^2.0.3, typescript@^2.2.1: +typescript@>=2.1.4, typescript@^2.0.0, typescript@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.2.1.tgz#4862b662b988a4c8ff691cc7969622d24db76ae9" -typings-core@^1.5.0: - version "1.6.1" - resolved "https://registry.yarnpkg.com/typings-core/-/typings-core-1.6.1.tgz#ce4b2931ea2f19bb8f3dacbec69983ac4e964a37" - dependencies: - any-promise "^1.3.0" - array-uniq "^1.0.2" - configstore "^2.0.0" - debug "^2.2.0" - detect-indent "^4.0.0" - graceful-fs "^4.1.2" - has "^1.0.1" - invariant "^2.2.0" - is-absolute "^0.2.3" - listify "^1.0.0" - lockfile "^1.0.1" - make-error-cause "^1.2.1" - mkdirp "^0.5.1" - object.pick "^1.1.1" - parse-json "^2.2.0" - popsicle "^8.0.2" - popsicle-proxy-agent "^3.0.0" - popsicle-retry "^3.2.0" - popsicle-rewrite "^1.0.0" - popsicle-status "^2.0.0" - promise-finally "^2.0.1" - rc "^1.1.5" - rimraf "^2.4.4" - sort-keys "^1.0.0" - string-template "^1.0.0" - strip-bom "^2.0.0" - thenify "^3.1.0" - throat "^3.0.0" - touch "^1.0.0" - typescript "^2.0.3" - xtend "^4.0.0" - zip-object "^0.1.0" - -typings@1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/typings/-/typings-1.4.0.tgz#f699ae68e0ca62b66eb1bd8f4e145809be3c6b8f" - dependencies: - any-promise "^1.0.0" - archy "^1.0.0" - bluebird "^3.1.1" - chalk "^1.0.0" - columnify "^1.5.2" - has-unicode "^2.0.1" - listify "^1.0.0" - minimist "^1.2.0" - typings-core "^1.5.0" - update-notifier "^1.0.0" - wordwrap "^1.0.0" - xtend "^4.0.1" - uglify-js@3.0.x: version "3.0.10" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.0.10.tgz#e220e7dbc05ce1f9bb08268b221aa0cf152fb4e2" @@ -4115,10 +3740,6 @@ ultron@1.0.x: version "1.0.2" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa" -unc-path-regex@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" - underscore@^1.5.2: version "1.8.3" resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022" @@ -4141,23 +3762,6 @@ unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" -unzip-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-1.0.2.tgz#b984f0877fc0a89c2c773cc1ef7b5b232b5b06fe" - -update-notifier@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-1.0.3.tgz#8f92c515482bd6831b7c93013e70f87552c7cf5a" - dependencies: - boxen "^0.6.0" - chalk "^1.0.0" - configstore "^2.0.0" - is-npm "^1.0.0" - latest-version "^2.0.0" - lazy-req "^1.1.0" - semver-diff "^2.0.0" - xdg-basedir "^2.0.0" - upper-case@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" @@ -4166,12 +3770,6 @@ urijs@^1.18.10: version "1.18.10" resolved "https://registry.yarnpkg.com/urijs/-/urijs-1.18.10.tgz#b94463eaba59a1a796036a467bb633c667f221ab" 
-url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - dependencies: - prepend-http "^1.0.1" - url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" @@ -4204,10 +3802,6 @@ utils-merge@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.0.tgz#0294fb922bb9375153541c4f7096231f287c8af8" -uuid@^2.0.1: - version "2.0.3" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" - uuid@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" @@ -4253,12 +3847,6 @@ watchpack@^1.2.0: chokidar "^1.4.3" graceful-fs "^4.1.2" -wcwidth@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - dependencies: - defaults "^1.0.3" - webdriver-js-extender@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/webdriver-js-extender/-/webdriver-js-extender-1.0.0.tgz#81c533a9e33d5bfb597b4e63e2cdb25b54777515" @@ -4343,12 +3931,6 @@ wide-align@^1.1.0: dependencies: string-width "^1.0.1" -widest-line@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-1.0.0.tgz#0c09c85c2a94683d0d7eaf8ee097d564bf0e105c" - dependencies: - string-width "^1.0.1" - window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" @@ -4376,14 +3958,6 @@ wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" -write-file-atomic@^1.1.2: - version "1.3.1" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-1.3.1.tgz#7d45ba32316328dd1ec7d90f60ebc0d845bb759a" - dependencies: - graceful-fs "^4.1.11" - imurmurhash "^0.1.4" - slide "^1.1.5" - ws@1.1.2, ws@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.2.tgz#8a244fa052401e08c9886cf44a85189e1fd4067f" @@ -4395,12 +3969,6 @@ wtf-8@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wtf-8/-/wtf-8-1.0.0.tgz#392d8ba2d0f1c34d1ee2d630f15d0efb68e1048a" -xdg-basedir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-2.0.0.tgz#edbc903cc385fc04523d966a335504b5504d1bd2" - dependencies: - os-homedir "^1.0.0" - xml-char-classes@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/xml-char-classes/-/xml-char-classes-1.0.0.tgz#64657848a20ffc5df583a42ad8a277b4512bbc4d" @@ -4429,7 +3997,7 @@ xmlhttprequest-ssl@1.5.3: version "1.5.3" resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz#185a888c04eca46c3e4070d99f7b49de3528992d" -xtend@^4.0.0, xtend@^4.0.1: +xtend@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" @@ -4485,7 +4053,3 @@ yn@^2.0.0: zeroclipboard@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/zeroclipboard/-/zeroclipboard-2.3.0.tgz#592ebd833a4308688b0739697d3dbf989002c9af" - -zip-object@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/zip-object/-/zip-object-0.1.0.tgz#c1a0da04c88c837756e248680a03ff902ec3f53a" From ae0ed04621cc9f6b191d8e6911cc69c35dbb104c Mon Sep 17 00:00:00 2001 From: alecmerdler Date: Tue, 27 Jun 2017 
11:56:14 -0700 Subject: [PATCH 13/22] remove reference to typings NPM package in Dockerfile --- .gitignore | 1 - Dockerfile | 1 - quay-base.dockerfile | 2 +- 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 92c6f1dea..e548f6174 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,6 @@ static/fonts static/build stack_local test/data/registry/ -typings GIT_HEAD .idea .python-version diff --git a/Dockerfile b/Dockerfile index c424800e3..fd48d3254 100644 --- a/Dockerfile +++ b/Dockerfile @@ -89,7 +89,6 @@ RUN ln -s /usr/bin/nodejs /usr/bin/node ADD package.json package.json ADD tsconfig.json tsconfig.json ADD webpack.config.js webpack.config.js -ADD typings.json typings.json ADD yarn.lock yarn.lock RUN yarn install --ignore-engines diff --git a/quay-base.dockerfile b/quay-base.dockerfile index 8fde99c75..7b01b3573 100644 --- a/quay-base.dockerfile +++ b/quay-base.dockerfile @@ -80,7 +80,7 @@ RUN curl -L -o /usr/local/bin/prometheus-aggregator https://github.com/coreos/pr # Install front-end dependencies RUN ln -s /usr/bin/nodejs /usr/bin/node -COPY static/ package.json tsconfig.json webpack.config.js typings.json yarn.lock ./ +COPY static/ package.json tsconfig.json webpack.config.js tslint.json yarn.lock ./ RUN yarn install --ignore-engines From 2f018046ecf726689158510bf36263f840dd7c8a Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 Jun 2017 11:38:36 +0300 Subject: [PATCH 14/22] Move conduct_call into a common test lib for all endpoints --- endpoints/api/test/shared.py | 52 +----------------- endpoints/api/test/test_disallow_for_apps.py | 3 +- endpoints/api/test/test_organization.py | 3 +- endpoints/api/test/test_repository.py | 7 +-- endpoints/api/test/test_search.py | 3 +- endpoints/api/test/test_security.py | 3 +- endpoints/api/test/test_signing.py | 13 ++--- endpoints/api/test/test_tag.py | 4 +- endpoints/api/test/test_team.py | 4 +- endpoints/appr/test/test_api_security.py | 2 +- endpoints/test/__init__.py | 0 endpoints/test/shared.py | 55 ++++++++++++++++++++ 12 files changed, 83 insertions(+), 66 deletions(-) create mode 100644 endpoints/test/__init__.py create mode 100644 endpoints/test/shared.py diff --git a/endpoints/api/test/shared.py b/endpoints/api/test/shared.py index 3d1f0cffa..1d35cdbc5 100644 --- a/endpoints/api/test/shared.py +++ b/endpoints/api/test/shared.py @@ -1,58 +1,10 @@ -import datetime -import json - -from contextlib import contextmanager -from data import model +from endpoints.test.shared import conduct_call from endpoints.api import api -CSRF_TOKEN_KEY = '_csrf_token' -CSRF_TOKEN = '123csrfforme' - - -@contextmanager -def client_with_identity(auth_username, client): - with client.session_transaction() as sess: - if auth_username and auth_username is not None: - loaded = model.user.get_user(auth_username) - sess['user_id'] = loaded.uuid - sess['login_time'] = datetime.datetime.now() - sess[CSRF_TOKEN_KEY] = CSRF_TOKEN - else: - sess['user_id'] = 'anonymous' - - yield client - - with client.session_transaction() as sess: - sess['user_id'] = None - sess['login_time'] = None - sess[CSRF_TOKEN_KEY] = None - - -def add_csrf_param(params): - """ Returns a params dict with the CSRF parameter added. """ - params = params or {} - params[CSRF_TOKEN_KEY] = CSRF_TOKEN - return params - - def conduct_api_call(client, resource, method, params, body=None, expected_code=200): """ Conducts an API call to the given resource via the given client, and ensures its returned status matches the code given. Returns the response. 
""" - params = add_csrf_param(params) - - final_url = api.url_for(resource, **params) - - headers = {} - headers.update({"Content-Type": "application/json"}) - - if body is not None: - body = json.dumps(body) - - rv = client.open(final_url, method=method, data=body, headers=headers) - msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code, - rv.data) - assert rv.status_code == expected_code, msg - return rv + return conduct_call(client, resource, api.url_for, method, params, body, expected_code) diff --git a/endpoints/api/test/test_disallow_for_apps.py b/endpoints/api/test/test_disallow_for_apps.py index 6de35c03b..b9112c291 100644 --- a/endpoints/api/test/test_disallow_for_apps.py +++ b/endpoints/api/test/test_disallow_for_apps.py @@ -16,7 +16,8 @@ from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerS BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger, TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources, BuildTriggerSourceNamespaces) -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call +from endpoints.test.shared import client_with_identity from test.fixtures import * BUILD_ARGS = {'build_uuid': '1234'} diff --git a/endpoints/api/test/test_organization.py b/endpoints/api/test/test_organization.py index 65b9a85d4..9a6525113 100644 --- a/endpoints/api/test/test_organization.py +++ b/endpoints/api/test/test_organization.py @@ -2,8 +2,9 @@ import pytest from data import model from endpoints.api import api -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.organization import Organization +from endpoints.test.shared import client_with_identity from test.fixtures import * @pytest.mark.parametrize('expiration, expected_code', [ diff --git a/endpoints/api/test/test_repository.py b/endpoints/api/test/test_repository.py index d110f5760..999beb00d 100644 --- a/endpoints/api/test/test_repository.py +++ b/endpoints/api/test/test_repository.py @@ -2,8 +2,9 @@ import pytest from mock import patch, ANY, MagicMock -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.repository import RepositoryTrust, Repository +from endpoints.test.shared import client_with_identity from features import FeatureNameValue from test.fixtures import * @@ -52,8 +53,8 @@ def test_signing_disabled(client): params = {'repository': 'devtable/simple'} response = conduct_api_call(cl, Repository, 'GET', params).json assert not response['trust_enabled'] - - + + def test_sni_support(): import ssl assert ssl.HAS_SNI diff --git a/endpoints/api/test/test_search.py b/endpoints/api/test/test_search.py index 4efba0841..1cca8d548 100644 --- a/endpoints/api/test/test_search.py +++ b/endpoints/api/test/test_search.py @@ -4,7 +4,8 @@ from playhouse.test_utils import assert_query_count from data.model import _basequery from endpoints.api.search import ConductRepositorySearch, ConductSearch -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call +from endpoints.test.shared import client_with_identity from test.fixtures import * @pytest.mark.parametrize('query, expected_query_count', [ diff --git a/endpoints/api/test/test_security.py b/endpoints/api/test/test_security.py index 40140b6fa..68039aed7 100644 --- 
a/endpoints/api/test/test_security.py +++ b/endpoints/api/test/test_security.py @@ -4,12 +4,13 @@ from flask_principal import AnonymousIdentity from endpoints.api import api from endpoints.api.repositorynotification import RepositoryNotification from endpoints.api.team import OrganizationTeamSyncing -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.repository import RepositoryTrust from endpoints.api.signing import RepositorySignatures from endpoints.api.search import ConductRepositorySearch from endpoints.api.superuser import SuperUserRepositoryBuildLogs, SuperUserRepositoryBuildResource from endpoints.api.superuser import SuperUserRepositoryBuildStatus +from endpoints.test.shared import client_with_identity from test.fixtures import * diff --git a/endpoints/api/test/test_signing.py b/endpoints/api/test/test_signing.py index 31f37d632..e941cee56 100644 --- a/endpoints/api/test/test_signing.py +++ b/endpoints/api/test/test_signing.py @@ -3,8 +3,9 @@ import pytest from collections import Counter from mock import patch -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.signing import RepositorySignatures +from endpoints.test.shared import client_with_identity from test.fixtures import * @@ -14,21 +15,21 @@ VALID_TARGETS_MAP = { "latest": { "hashes": { "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ=" - }, + }, "length": 2111 } - }, + }, "expiration": "2020-05-22T10:26:46.618176424-04:00" - }, + }, "targets": { "targets": { "latest": { "hashes": { "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ=" - }, + }, "length": 2111 } - }, + }, "expiration": "2020-05-22T10:26:01.953414888-04:00"} } diff --git a/endpoints/api/test/test_tag.py b/endpoints/api/test/test_tag.py index 0c80ef4ee..a94261fc4 100644 --- a/endpoints/api/test/test_tag.py +++ b/endpoints/api/test/test_tag.py @@ -2,8 +2,10 @@ import pytest from mock import patch, Mock -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.tag import RepositoryTag, RestoreTag +from endpoints.test.shared import client_with_identity + from features import FeatureNameValue from test.fixtures import * diff --git a/endpoints/api/test/test_team.py b/endpoints/api/test/test_team.py index c40f8f199..9a17a36e4 100644 --- a/endpoints/api/test/test_team.py +++ b/endpoints/api/test/test_team.py @@ -4,9 +4,11 @@ from mock import patch from data import model from endpoints.api import api -from endpoints.api.test.shared import client_with_identity, conduct_api_call +from endpoints.api.test.shared import conduct_api_call from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList from endpoints.api.organization import Organization +from endpoints.test.shared import client_with_identity + from test.test_ldap import mock_ldap from test.fixtures import * diff --git a/endpoints/appr/test/test_api_security.py b/endpoints/appr/test/test_api_security.py index e37b2f092..c3e52b30c 100644 --- a/endpoints/appr/test/test_api_security.py +++ b/endpoints/appr/test/test_api_security.py @@ -5,7 +5,7 @@ from flask import url_for from data import model from endpoints.appr.registry import appr_bp, blobs -from endpoints.api.test.shared import client_with_identity +from endpoints.test.shared import client_with_identity from test.fixtures import * BLOB_ARGS = 
{'digest': 'abcd1235'} diff --git a/endpoints/test/__init__.py b/endpoints/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/endpoints/test/shared.py b/endpoints/test/shared.py new file mode 100644 index 000000000..baf7de18f --- /dev/null +++ b/endpoints/test/shared.py @@ -0,0 +1,55 @@ +import datetime +import json + +from contextlib import contextmanager +from data import model + +CSRF_TOKEN_KEY = '_csrf_token' +CSRF_TOKEN = '123csrfforme' + +@contextmanager +def client_with_identity(auth_username, client): + with client.session_transaction() as sess: + if auth_username and auth_username is not None: + loaded = model.user.get_user(auth_username) + sess['user_id'] = loaded.uuid + sess['login_time'] = datetime.datetime.now() + sess[CSRF_TOKEN_KEY] = CSRF_TOKEN + else: + sess['user_id'] = 'anonymous' + + yield client + + with client.session_transaction() as sess: + sess['user_id'] = None + sess['login_time'] = None + sess[CSRF_TOKEN_KEY] = None + + +def add_csrf_param(params): + """ Returns a params dict with the CSRF parameter added. """ + params = params or {} + + if not CSRF_TOKEN_KEY in params: + params[CSRF_TOKEN_KEY] = CSRF_TOKEN + + return params + + +def conduct_call(client, resource, url_for, method, params, body=None, expected_code=200, headers=None): + """ Conducts a call to a Flask endpoint. """ + params = add_csrf_param(params) + + final_url = url_for(resource, **params) + + headers = headers or {} + headers.update({"Content-Type": "application/json"}) + + if body is not None: + body = json.dumps(body) + + rv = client.open(final_url, method=method, data=body, headers=headers) + msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code, + rv.data) + assert rv.status_code == expected_code, msg + return rv From 8ac20edfb2719ea9bf124afb54d7d971931da37e Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 Jun 2017 12:48:02 +0300 Subject: [PATCH 15/22] Move verbs security tests into pytest style --- endpoints/test/shared.py | 15 +++- endpoints/verbs/test/test_security.py | 69 ++++++++++++++++++ test/fixtures.py | 2 + test/specs.py | 97 ------------------------- test/test_verbs_endpoint_security.py | 100 -------------------------- 5 files changed, 85 insertions(+), 198 deletions(-) create mode 100644 endpoints/verbs/test/test_security.py delete mode 100644 test/test_verbs_endpoint_security.py diff --git a/endpoints/test/shared.py b/endpoints/test/shared.py index baf7de18f..abb22ded9 100644 --- a/endpoints/test/shared.py +++ b/endpoints/test/shared.py @@ -1,9 +1,13 @@ import datetime import json +import base64 from contextlib import contextmanager from data import model +from flask import g +from flask_principal import Identity + CSRF_TOKEN_KEY = '_csrf_token' CSRF_TOKEN = '123csrfforme' @@ -36,7 +40,13 @@ def add_csrf_param(params): return params -def conduct_call(client, resource, url_for, method, params, body=None, expected_code=200, headers=None): +def gen_basic_auth(username, password): + """ Generates a basic auth header. """ + return 'Basic ' + base64.b64encode("%s:%s" % (username, password)) + + +def conduct_call(client, resource, url_for, method, params, body=None, expected_code=200, + headers=None): """ Conducts a call to a Flask endpoint. """ params = add_csrf_param(params) @@ -48,6 +58,9 @@ def conduct_call(client, resource, url_for, method, params, body=None, expected_ if body is not None: body = json.dumps(body) + # Required for anonymous calls to not exception. 
+ g.identity = Identity(None, 'none') + rv = client.open(final_url, method=method, data=body, headers=headers) msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code, rv.data) diff --git a/endpoints/verbs/test/test_security.py b/endpoints/verbs/test/test_security.py new file mode 100644 index 000000000..5e53c68a8 --- /dev/null +++ b/endpoints/verbs/test/test_security.py @@ -0,0 +1,69 @@ +import pytest + +from flask import url_for +from endpoints.test.shared import conduct_call, gen_basic_auth +from test.fixtures import * + +NO_ACCESS_USER = 'freshuser' +READ_ACCESS_USER = 'reader' +ADMIN_ACCESS_USER = 'devtable' +CREATOR_ACCESS_USER = 'creator' + +PUBLIC_REPO = 'public/publicrepo' +PRIVATE_REPO = 'devtable/shared' +ORG_REPO = 'buynlarge/orgrepo' +ANOTHER_ORG_REPO = 'buynlarge/anotherorgrepo' + +ACI_ARGS = { + 'server': 'someserver', + 'tag': 'fake', + 'os': 'linux', + 'arch': 'x64', +} + +@pytest.mark.parametrize('user', [ + (0, None), + (1, NO_ACCESS_USER), + (2, READ_ACCESS_USER), + (3, CREATOR_ACCESS_USER), + (4, ADMIN_ACCESS_USER), +]) +@pytest.mark.parametrize('endpoint,method,repository,single_repo_path,params,expected_statuses', [ + ('get_aci_signature', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), + ('get_aci_signature', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_signature', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_signature', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), + + # get_aci_image + ('get_aci_image', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), + ('get_aci_image', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_image', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_image', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), + + # get_squashed_tag + ('get_squashed_tag', 'GET', PUBLIC_REPO, False, dict(tag='fake'), (404, 404, 404, 404, 404)), + ('get_squashed_tag', 'GET', PRIVATE_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), + ('get_squashed_tag', 'GET', ORG_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), + ('get_squashed_tag', 'GET', ANOTHER_ORG_REPO, False, dict(tag='fake'), (403, 403, 403, 403, 404)), + + # get_tag_torrent + ('get_tag_torrent', 'GET', PUBLIC_REPO, True, dict(digest='sha256:1234'), (404, 404, 404, 404, 404)), + ('get_tag_torrent', 'GET', PRIVATE_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403, 404)), + ('get_tag_torrent', 'GET', ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403, 404)), + ('get_tag_torrent', 'GET', ANOTHER_ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 403, 403, 404)), +]) +def test_verbs_security(user, endpoint, method, repository, single_repo_path, params, + expected_statuses, app, client): + headers = {} + if user[1] is not None: + headers['Authorization'] = gen_basic_auth(user[1], 'password') + + if single_repo_path: + params['repository'] = repository + else: + (namespace, repo_name) = repository.split('/') + params['namespace'] = namespace + params['repository'] = repo_name + + conduct_call(client, 'verbs.' 
+ endpoint, url_for, method, params, + expected_code=expected_statuses[user[0]], headers=headers) diff --git a/test/fixtures.py b/test/fixtures.py index bee8199e1..c1f9e3b74 100644 --- a/test/fixtures.py +++ b/test/fixtures.py @@ -15,6 +15,7 @@ from data.model.user import LoginWrappedDBUser from endpoints.api import api_bp from endpoints.appr import appr_bp from endpoints.web import web +from endpoints.verbs import verbs as verbs_bp from initdb import initialize_database, populate_database @@ -166,6 +167,7 @@ def app(appconfig, initialized_db): app.register_blueprint(api_bp, url_prefix='/api') app.register_blueprint(appr_bp, url_prefix='/cnr') app.register_blueprint(web, url_prefix='/') + app.register_blueprint(verbs_bp, url_prefix='/c1') app.config.update(appconfig) return app diff --git a/test/specs.py b/test/specs.py index d7bb79061..54d6d8c64 100644 --- a/test/specs.py +++ b/test/specs.py @@ -509,100 +509,3 @@ def build_v2_index_specs(): request_status(401, 401, 401, 401, 404), ] - -class VerbTestSpec(object): - def __init__(self, index_name, method_name, repo_name, rpath=False, **kwargs): - self.index_name = index_name - self.repo_name = repo_name - self.method_name = method_name - self.single_repository_path = rpath - - self.kwargs = kwargs - - self.anon_code = 401 - self.no_access_code = 403 - self.read_code = 200 - self.admin_code = 200 - self.creator_code = 200 - - def request_status(self, anon_code=401, no_access_code=403, read_code=200, creator_code=200, - admin_code=200): - self.anon_code = anon_code - self.no_access_code = no_access_code - self.read_code = read_code - self.creator_code = creator_code - self.admin_code = admin_code - return self - - def get_url(self): - if self.single_repository_path: - return url_for(self.index_name, repository=self.repo_name, **self.kwargs) - else: - (namespace, repo_name) = self.repo_name.split('/') - return url_for(self.index_name, namespace=namespace, repository=repo_name, **self.kwargs) - - def gen_basic_auth(self, username, password): - encoded = b64encode('%s:%s' % (username, password)) - return 'basic %s' % encoded - -ACI_ARGS = { - 'server': 'someserver', - 'tag': 'fake', - 'os': 'linux', - 'arch': 'x64', -} - -def build_verbs_specs(): - return [ - # get_aci_signature - VerbTestSpec('verbs.get_aci_signature', 'GET', PUBLIC_REPO, **ACI_ARGS). - request_status(404, 404, 404, 404, 404), - - VerbTestSpec('verbs.get_aci_signature', 'GET', PRIVATE_REPO, **ACI_ARGS). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_aci_signature', 'GET', ORG_REPO, **ACI_ARGS). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_aci_signature', 'GET', ANOTHER_ORG_REPO, **ACI_ARGS). - request_status(403, 403, 403, 403, 404), - - # get_aci_image - VerbTestSpec('verbs.get_aci_image', 'GET', PUBLIC_REPO, **ACI_ARGS). - request_status(404, 404, 404, 404, 404), - - VerbTestSpec('verbs.get_aci_image', 'GET', PRIVATE_REPO, **ACI_ARGS). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_aci_image', 'GET', ORG_REPO, **ACI_ARGS). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_aci_image', 'GET', ANOTHER_ORG_REPO, **ACI_ARGS). - request_status(403, 403, 403, 403, 404), - - # get_squashed_tag - VerbTestSpec('verbs.get_squashed_tag', 'GET', PUBLIC_REPO, tag='fake'). - request_status(404, 404, 404, 404, 404), - - VerbTestSpec('verbs.get_squashed_tag', 'GET', PRIVATE_REPO, tag='fake'). 
- request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_squashed_tag', 'GET', ORG_REPO, tag='fake'). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_squashed_tag', 'GET', ANOTHER_ORG_REPO, tag='fake'). - request_status(403, 403, 403, 403, 404), - - # get_tag_torrent - VerbTestSpec('verbs.get_tag_torrent', 'GET', PUBLIC_REPO, digest='sha256:1234', rpath=True). - request_status(404, 404, 404, 404, 404), - - VerbTestSpec('verbs.get_tag_torrent', 'GET', PRIVATE_REPO, digest='sha256:1234', rpath=True). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_tag_torrent', 'GET', ORG_REPO, digest='sha256:1234', rpath=True). - request_status(403, 403, 404, 403, 404), - - VerbTestSpec('verbs.get_tag_torrent', 'GET', ANOTHER_ORG_REPO, digest='sha256:1234', rpath=True). - request_status(403, 403, 403, 403, 404), - ] diff --git a/test/test_verbs_endpoint_security.py b/test/test_verbs_endpoint_security.py deleted file mode 100644 index ac6ac36b9..000000000 --- a/test/test_verbs_endpoint_security.py +++ /dev/null @@ -1,100 +0,0 @@ -import unittest - -import endpoints.decorated # Register the various exceptions via decorators. - -from app import app -from endpoints.verbs import verbs -from initdb import setup_database_for_testing, finished_database_for_testing -from test.specs import build_verbs_specs - -app.register_blueprint(verbs, url_prefix='/c1') - -NO_ACCESS_USER = 'freshuser' -READ_ACCESS_USER = 'reader' -ADMIN_ACCESS_USER = 'devtable' -CREATOR_ACCESS_USER = 'creator' - - -class EndpointTestCase(unittest.TestCase): - def setUp(self): - setup_database_for_testing(self) - - def tearDown(self): - finished_database_for_testing(self) - - -class _SpecTestBuilder(type): - @staticmethod - def _test_generator(url, test_spec, attrs): - def test(self): - with app.test_client() as c: - headers = {} - - if attrs['auth_username']: - headers['Authorization'] = test_spec.gen_basic_auth(attrs['auth_username'], 'password') - - expected_status = getattr(test_spec, attrs['result_attr']) - - rv = c.open(url, headers=headers, method=test_spec.method_name) - msg = '%s %s: got %s, expected: %s (auth: %s | headers %s)' % (test_spec.method_name, - test_spec.index_name, rv.status_code, expected_status, attrs['auth_username'], - headers) - - self.assertEqual(rv.status_code, expected_status, msg) - - return test - - - def __new__(cls, name, bases, attrs): - with app.test_request_context() as ctx: - specs = attrs['spec_func']() - for test_spec in specs: - test_name = '%s_%s_%s_%s_%s' % (test_spec.index_name, test_spec.method_name, - test_spec.repo_name, attrs['auth_username'] or 'anon', - attrs['result_attr']) - test_name = test_name.replace('/', '_').replace('-', '_') - - test_name = 'test_' + test_name.lower().replace('verbs.', 'verbs_') - url = test_spec.get_url() - attrs[test_name] = _SpecTestBuilder._test_generator(url, test_spec, attrs) - - return type(name, bases, attrs) - - -class TestAnonymousAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder - spec_func = build_verbs_specs - result_attr = 'anon_code' - auth_username = None - - -class TestNoAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder - spec_func = build_verbs_specs - result_attr = 'no_access_code' - auth_username = NO_ACCESS_USER - - -class TestReadAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder - spec_func = build_verbs_specs - result_attr = 'read_code' - auth_username = READ_ACCESS_USER - - -class TestCreatorAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder - 
spec_func = build_verbs_specs - result_attr = 'creator_code' - auth_username = CREATOR_ACCESS_USER - - -class TestAdminAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder - spec_func = build_verbs_specs - result_attr = 'admin_code' - auth_username = ADMIN_ACCESS_USER - - -if __name__ == '__main__': - unittest.main() From 8b4958dbccefae39c6ec7bf25829e4ac17331112 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 Jun 2017 13:05:02 +0300 Subject: [PATCH 16/22] Move verbs model into new file structure --- endpoints/verbs/__init__.py | 2 +- endpoints/verbs/models_interface.py | 146 ++++++++++++++++ .../verbs/models_pre_oci.py | 157 +----------------- 3 files changed, 156 insertions(+), 149 deletions(-) create mode 100644 endpoints/verbs/models_interface.py rename data/interfaces/verbs.py => endpoints/verbs/models_pre_oci.py (61%) diff --git a/endpoints/verbs/__init__.py b/endpoints/verbs/__init__.py index 27a5f2330..76ddc0498 100644 --- a/endpoints/verbs/__init__.py +++ b/endpoints/verbs/__init__.py @@ -10,9 +10,9 @@ from auth.auth_context import get_authenticated_user from auth.decorators import process_auth from auth.permissions import ReadRepositoryPermission from data import database -from data.interfaces.verbs import pre_oci_model as model from endpoints.common import route_show_if, parse_repository_name from endpoints.decorators import anon_protect +from endpoints.verbs.models_pre_oci import pre_oci_model as model from endpoints.v2.blob import BLOB_DIGEST_ROUTE from image.appc import AppCImageFormatter from image.docker.squashed import SquashedDockerImageFormatter diff --git a/endpoints/verbs/models_interface.py b/endpoints/verbs/models_interface.py new file mode 100644 index 000000000..868b0e76f --- /dev/null +++ b/endpoints/verbs/models_interface.py @@ -0,0 +1,146 @@ +from abc import ABCMeta, abstractmethod +from collections import namedtuple + +from six import add_metaclass + +class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', + 'is_public', 'kind'])): + """ + Repository represents a namespaced collection of tags. + :type id: int + :type name: string + :type namespace_name: string + :type description: string + :type is_public: bool + :type kind: string + """ + + +class DerivedImage(namedtuple('DerivedImage', ['ref', 'blob', 'internal_source_image_db_id'])): + """ + DerivedImage represents a user-facing alias for an image which was derived from another image. + """ + +class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])): + """ + RepositoryReference represents a reference to a Repository, without its full metadata. + """ + +class ImageWithBlob(namedtuple('Image', ['image_id', 'blob', 'compat_metadata', 'repository', + 'internal_db_id', 'v1_metadata'])): + """ + ImageWithBlob represents a user-facing alias for referencing an image, along with its blob. + """ + +class Blob(namedtuple('Blob', ['uuid', 'size', 'uncompressed_size', 'uploading', 'locations'])): + """ + Blob represents an opaque binary blob saved to the storage system. + """ + +class TorrentInfo(namedtuple('TorrentInfo', ['piece_length', 'pieces'])): + """ + TorrentInfo represents the torrent piece information associated with a blob. + """ + + +@add_metaclass(ABCMeta) +class VerbsDataInterface(object): + """ + Interface that represents all data store interactions required by the registry's custom HTTP + verbs. 
+ """ + @abstractmethod + def get_repository(self, namespace_name, repo_name): + """ + Returns a repository tuple for the repository with the given name under the given namespace. + Returns None if no such repository was found. + """ + pass + + @abstractmethod + def get_manifest_layers_with_blobs(self, repo_image): + """ + Returns the full set of manifest layers and their associated blobs starting at the given + repository image and working upwards to the root image. + """ + pass + + @abstractmethod + def get_blob_path(self, blob): + """ + Returns the storage path for the given blob. + """ + pass + + @abstractmethod + def get_derived_image_signature(self, derived_image, signer_name): + """ + Returns the signature associated with the derived image and a specific signer or None if none. + """ + pass + + @abstractmethod + def set_derived_image_signature(self, derived_image, signer_name, signature): + """ + Sets the calculated signature for the given derived image and signer to that specified. + """ + pass + + @abstractmethod + def delete_derived_image(self, derived_image): + """ + Deletes a derived image and all of its storage. + """ + pass + + @abstractmethod + def set_blob_size(self, blob, size): + """ + Sets the size field on a blob to the value specified. + """ + pass + + @abstractmethod + def get_repo_blob_by_digest(self, namespace_name, repo_name, digest): + """ + Returns the blob with the given digest under the matching repository or None if none. + """ + pass + + @abstractmethod + def get_torrent_info(self, blob): + """ + Returns the torrent information associated with the given blob or None if none. + """ + pass + + @abstractmethod + def set_torrent_info(self, blob, piece_length, pieces): + """ + Sets the torrent infomation associated with the given blob to that specified. + """ + pass + + @abstractmethod + def lookup_derived_image(self, repo_image, verb, varying_metadata=None): + """ + Looks up the derived image for the given repository image, verb and optional varying metadata + and returns it or None if none. + """ + pass + + @abstractmethod + def lookup_or_create_derived_image(self, repo_image, verb, location, varying_metadata=None): + """ + Looks up the derived image for the given repository image, verb and optional varying metadata + and returns it. If none exists, a new derived image is created. + """ + pass + + @abstractmethod + def get_tag_image(self, namespace_name, repo_name, tag_name): + """ + Returns the image associated with the live tag with the given name under the matching repository + or None if none. + """ + pass diff --git a/data/interfaces/verbs.py b/endpoints/verbs/models_pre_oci.py similarity index 61% rename from data/interfaces/verbs.py rename to endpoints/verbs/models_pre_oci.py index 6222f46b7..728e1f2ed 100644 --- a/data/interfaces/verbs.py +++ b/endpoints/verbs/models_pre_oci.py @@ -1,156 +1,17 @@ import json -from abc import ABCMeta, abstractmethod -from collections import namedtuple - -from six import add_metaclass - from data import model from image.docker.v1 import DockerV1Metadata - -class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', - 'is_public', 'kind'])): - """ - Repository represents a namespaced collection of tags. 
- :type id: int - :type name: string - :type namespace_name: string - :type description: string - :type is_public: bool - :type kind: string - """ - - -class DerivedImage(namedtuple('DerivedImage', ['ref', 'blob', 'internal_source_image_db_id'])): - """ - DerivedImage represents a user-facing alias for an image which was derived from another image. - """ - -class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])): - """ - RepositoryReference represents a reference to a Repository, without its full metadata. - """ - -class ImageWithBlob(namedtuple('Image', ['image_id', 'blob', 'compat_metadata', 'repository', - 'internal_db_id', 'v1_metadata'])): - """ - ImageWithBlob represents a user-facing alias for referencing an image, along with its blob. - """ - -class Blob(namedtuple('Blob', ['uuid', 'size', 'uncompressed_size', 'uploading', 'locations'])): - """ - Blob represents an opaque binary blob saved to the storage system. - """ - -class TorrentInfo(namedtuple('TorrentInfo', ['piece_length', 'pieces'])): - """ - TorrentInfo represents the torrent piece information associated with a blob. - """ - - -@add_metaclass(ABCMeta) -class VerbsDataInterface(object): - """ - Interface that represents all data store interactions required by the registry's custom HTTP - verbs. - """ - @abstractmethod - def get_repository(self, namespace_name, repo_name): - """ - Returns a repository tuple for the repository with the given name under the given namespace. - Returns None if no such repository was found. - """ - pass - - @abstractmethod - def get_manifest_layers_with_blobs(self, repo_image): - """ - Returns the full set of manifest layers and their associated blobs starting at the given - repository image and working upwards to the root image. - """ - pass - - @abstractmethod - def get_blob_path(self, blob): - """ - Returns the storage path for the given blob. - """ - pass - - @abstractmethod - def get_derived_image_signature(self, derived_image, signer_name): - """ - Returns the signature associated with the derived image and a specific signer or None if none. - """ - pass - - @abstractmethod - def set_derived_image_signature(self, derived_image, signer_name, signature): - """ - Sets the calculated signature for the given derived image and signer to that specified. - """ - pass - - @abstractmethod - def delete_derived_image(self, derived_image): - """ - Deletes a derived image and all of its storage. - """ - pass - - @abstractmethod - def set_blob_size(self, blob, size): - """ - Sets the size field on a blob to the value specified. - """ - pass - - @abstractmethod - def get_repo_blob_by_digest(self, namespace_name, repo_name, digest): - """ - Returns the blob with the given digest under the matching repository or None if none. - """ - pass - - @abstractmethod - def get_torrent_info(self, blob): - """ - Returns the torrent information associated with the given blob or None if none. - """ - pass - - @abstractmethod - def set_torrent_info(self, blob, piece_length, pieces): - """ - Sets the torrent infomation associated with the given blob to that specified. - """ - pass - - @abstractmethod - def lookup_derived_image(self, repo_image, verb, varying_metadata=None): - """ - Looks up the derived image for the given repository image, verb and optional varying metadata - and returns it or None if none. 
- """ - pass - - @abstractmethod - def lookup_or_create_derived_image(self, repo_image, verb, location, varying_metadata=None): - """ - Looks up the derived image for the given repository image, verb and optional varying metadata - and returns it. If none exists, a new derived image is created. - """ - pass - - @abstractmethod - def get_tag_image(self, namespace_name, repo_name, tag_name): - """ - Returns the image associated with the live tag with the given name under the matching repository - or None if none. - """ - pass - +from endpoints.verbs.models_interface import ( + Blob, + DerivedImage, + ImageWithBlob, + Repository, + RepositoryReference, + TorrentInfo, + VerbsDataInterface, +) class PreOCIModel(VerbsDataInterface): """ From ec81148d7373d3d0c1f55681c10f480ca823becc Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 Jun 2017 14:03:57 +0300 Subject: [PATCH 17/22] Add super basic security worker test --- workers/securityworker.py | 88 +++++++++++++++-------------- workers/test/test_securityworker.py | 9 +++ 2 files changed, 55 insertions(+), 42 deletions(-) create mode 100644 workers/test/test_securityworker.py diff --git a/workers/securityworker.py b/workers/securityworker.py index 487b2f537..fb59754ac 100644 --- a/workers/securityworker.py +++ b/workers/securityworker.py @@ -26,6 +26,49 @@ unscanned_images_gauge = prometheus.create_gauge('unscanned_images', max_unscanned_images_gauge = prometheus.create_gauge('max_unscanned_image_id', 'Max ID of the unscanned images.') +def index_images(min_id, target_version, analyzer): + def batch_query(): + return get_images_eligible_for_scan(target_version) + + # Get the ID of the last image we can analyze. Will be None if there are no images in the + # database. + max_id = get_max_id_for_sec_scan() + if max_id is None: + return None + + if min_id is None or min_id > max_id: + logger.info('Could not find any available images for scanning.') + return None + + max_unscanned_images_gauge.Set(max_id) + + # 4^log10(total) gives us a scalable batch size into the billions. + batch_size = int(4 ** log10(max(10, max_id - min_id))) + + with UseThenDisconnect(app.config): + to_scan_generator = yield_random_entries( + batch_query, + get_image_pk_field(), + batch_size, + max_id, + min_id, + ) + for candidate, abt, num_remaining in to_scan_generator: + try: + analyzer.analyze_recursively(candidate) + except PreemptedException: + logger.info('Another worker pre-empted us for layer: %s', candidate.id) + abt.set() + except APIRequestFailure: + logger.exception('Security scanner service unavailable') + return + + unscanned_images_gauge.Set(num_remaining) + + # If we reach this point, we analyzed every images up to max_id, next time the worker runs, + # we want to start from the next image. + return max_id + 1 + class SecurityWorker(Worker): def __init__(self): super(SecurityWorker, self).__init__() @@ -42,48 +85,9 @@ class SecurityWorker(Worker): self.add_operation(self._index_images, interval) def _index_images(self): - def batch_query(): - return get_images_eligible_for_scan(self._target_version) - - # Get the ID of the last image we can analyze. Will be None if there are no images in the - # database. - max_id = get_max_id_for_sec_scan() - if max_id is None: - return - - if self.min_id is None or self.min_id > max_id: - logger.info('Could not find any available images for scanning.') - return - - max_unscanned_images_gauge.Set(max_id) - - # 4^log10(total) gives us a scalable batch size into the billions. 
- batch_size = int(4 ** log10(max(10, max_id - self.min_id))) - - with UseThenDisconnect(app.config): - to_scan_generator = yield_random_entries( - batch_query, - get_image_pk_field(), - batch_size, - max_id, - self.min_id, - ) - for candidate, abt, num_remaining in to_scan_generator: - try: - self._analyzer.analyze_recursively(candidate) - except PreemptedException: - logger.info('Another worker pre-empted us for layer: %s', candidate.id) - abt.set() - except APIRequestFailure: - logger.exception('Security scanner service unavailable') - return - - unscanned_images_gauge.Set(num_remaining) - - # If we reach this point, we analyzed every images up to max_id, next time the worker runs, - # we want to start from the next image. - self.min_id = max_id + 1 - + new_min_id = index_images(self.min_id, self._target_version, self._analyzer) + if new_min_id is not None: + self.min_id = new_min_id @property def min_id(self): diff --git a/workers/test/test_securityworker.py b/workers/test/test_securityworker.py new file mode 100644 index 000000000..de3927450 --- /dev/null +++ b/workers/test/test_securityworker.py @@ -0,0 +1,9 @@ +from mock import patch, Mock + +from test.fixtures import * +from workers.securityworker import index_images + +def test_securityworker_realdb(initialized_db): + mock_analyzer = Mock() + assert index_images(0, 1, mock_analyzer) is not None + mock_analyzer.analyze_recursively.assert_called() From 1ddb09ac1133b39124705abc116f0593d7c0f46d Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Wed, 28 Jun 2017 14:50:52 +0300 Subject: [PATCH 18/22] Change security worker to use data interface --- conf/init/service/securityworker/run | 2 +- workers/securityworker.py | 119 ------------------ workers/securityworker/__init__.py | 35 ++++++ workers/securityworker/models_interface.py | 28 +++++ workers/securityworker/models_pre_oci.py | 52 ++++++++ workers/securityworker/securityworker.py | 47 +++++++ .../test/test_securityworker.py | 2 +- 7 files changed, 164 insertions(+), 121 deletions(-) delete mode 100644 workers/securityworker.py create mode 100644 workers/securityworker/__init__.py create mode 100644 workers/securityworker/models_interface.py create mode 100644 workers/securityworker/models_pre_oci.py create mode 100644 workers/securityworker/securityworker.py rename workers/{ => securityworker}/test/test_securityworker.py (80%) diff --git a/conf/init/service/securityworker/run b/conf/init/service/securityworker/run index c40f9aa4b..22a47af96 100755 --- a/conf/init/service/securityworker/run +++ b/conf/init/service/securityworker/run @@ -3,6 +3,6 @@ echo 'Starting security scanner worker' cd / -venv/bin/python -m workers.securityworker 2>&1 +venv/bin/python -m workers.securityworker.securityworker 2>&1 echo 'Security scanner worker exited' diff --git a/workers/securityworker.py b/workers/securityworker.py deleted file mode 100644 index fb59754ac..000000000 --- a/workers/securityworker.py +++ /dev/null @@ -1,119 +0,0 @@ -import logging.config -import time - -from math import log10 - -import features - -from app import app, secscan_api, prometheus -from workers.worker import Worker -from data.database import UseThenDisconnect -from data.model.image import (get_images_eligible_for_scan, get_image_pk_field, - get_max_id_for_sec_scan, get_min_id_for_sec_scan) -from util.secscan.api import SecurityConfigValidator, APIRequestFailure -from util.secscan.analyzer import LayerAnalyzer, PreemptedException -from util.migrate.allocator import yield_random_entries -from util.log import logfile_path 
-from endpoints.v2 import v2_bp - - -DEFAULT_INDEXING_INTERVAL = 30 - - -logger = logging.getLogger(__name__) -unscanned_images_gauge = prometheus.create_gauge('unscanned_images', - 'Number of images that clair needs to scan.') -max_unscanned_images_gauge = prometheus.create_gauge('max_unscanned_image_id', - 'Max ID of the unscanned images.') - -def index_images(min_id, target_version, analyzer): - def batch_query(): - return get_images_eligible_for_scan(target_version) - - # Get the ID of the last image we can analyze. Will be None if there are no images in the - # database. - max_id = get_max_id_for_sec_scan() - if max_id is None: - return None - - if min_id is None or min_id > max_id: - logger.info('Could not find any available images for scanning.') - return None - - max_unscanned_images_gauge.Set(max_id) - - # 4^log10(total) gives us a scalable batch size into the billions. - batch_size = int(4 ** log10(max(10, max_id - min_id))) - - with UseThenDisconnect(app.config): - to_scan_generator = yield_random_entries( - batch_query, - get_image_pk_field(), - batch_size, - max_id, - min_id, - ) - for candidate, abt, num_remaining in to_scan_generator: - try: - analyzer.analyze_recursively(candidate) - except PreemptedException: - logger.info('Another worker pre-empted us for layer: %s', candidate.id) - abt.set() - except APIRequestFailure: - logger.exception('Security scanner service unavailable') - return - - unscanned_images_gauge.Set(num_remaining) - - # If we reach this point, we analyzed every images up to max_id, next time the worker runs, - # we want to start from the next image. - return max_id + 1 - -class SecurityWorker(Worker): - def __init__(self): - super(SecurityWorker, self).__init__() - validator = SecurityConfigValidator(app.config) - if not validator.valid(): - logger.warning('Failed to validate security scan configuration') - return - - self._target_version = app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 3) - self._analyzer = LayerAnalyzer(app.config, secscan_api) - self._min_id = None - - interval = app.config.get('SECURITY_SCANNER_INDEXING_INTERVAL', DEFAULT_INDEXING_INTERVAL) - self.add_operation(self._index_images, interval) - - def _index_images(self): - new_min_id = index_images(self.min_id, self._target_version, self._analyzer) - if new_min_id is not None: - self.min_id = new_min_id - - @property - def min_id(self): - """ If it hasn't already been determined, finds the ID of the first image to be analyzed. - First checks the config, then the database, and returns None if there are no images - available for scanning. 
- """ - if self._min_id is None: - self._min_id = app.config.get('SECURITY_SCANNER_INDEXING_MIN_ID') - if self._min_id is None: - self._min_id = get_min_id_for_sec_scan(self._target_version) - return self._min_id - - @min_id.setter - def min_id(self, new_min_id): - self._min_id = new_min_id - - -if __name__ == '__main__': - app.register_blueprint(v2_bp, url_prefix='/v2') - - if not features.SECURITY_SCANNER: - logger.debug('Security scanner disabled; skipping SecurityWorker') - while True: - time.sleep(100000) - - logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False) - worker = SecurityWorker() - worker.start() diff --git a/workers/securityworker/__init__.py b/workers/securityworker/__init__.py new file mode 100644 index 000000000..a101f6795 --- /dev/null +++ b/workers/securityworker/__init__.py @@ -0,0 +1,35 @@ +import logging.config + +from app import app, prometheus +from data.database import UseThenDisconnect +from workers.securityworker.models_pre_oci import pre_oci_model as model +from util.secscan.api import APIRequestFailure +from util.secscan.analyzer import PreemptedException + +logger = logging.getLogger(__name__) +unscanned_images_gauge = prometheus.create_gauge('unscanned_images', + 'Number of images that clair needs to scan.') + +def index_images(target_version, analyzer, token=None): + """ Performs security indexing of all images in the database not scanned at the target version. + If a token is provided, scanning will begin where the token indicates it previously completed. + """ + iterator, next_token = model.candidates_to_scan(target_version, start_token=token) + if iterator is None: + logger.debug('Found no additional images to scan') + return None + + with UseThenDisconnect(app.config): + for candidate, abt, num_remaining in iterator: + try: + analyzer.analyze_recursively(candidate) + except PreemptedException: + logger.info('Another worker pre-empted us for layer: %s', candidate.id) + abt.set() + except APIRequestFailure: + logger.exception('Security scanner service unavailable') + return + + unscanned_images_gauge.Set(num_remaining) + + return next_token diff --git a/workers/securityworker/models_interface.py b/workers/securityworker/models_interface.py new file mode 100644 index 000000000..6d872ca4e --- /dev/null +++ b/workers/securityworker/models_interface.py @@ -0,0 +1,28 @@ +from abc import ABCMeta, abstractmethod +from collections import namedtuple + +from six import add_metaclass + +class ScanToken(namedtuple('NextScanToken', ['min_id'])): + """ + ScanToken represents an opaque token that can be passed between runs of the security worker + to continue scanning whereever the previous run left off. Note that the data of the token is + *opaque* to the security worker, and the security worker should *not* pull any data out or modify + the token in any way. + """ + +@add_metaclass(ABCMeta) +class SecurityWorkerDataInterface(object): + """ + Interface that represents all data store interactions required by the security worker. + """ + + @abstractmethod + def candidates_to_scan(self, target_version, start_token=None): + """ + Returns a tuple consisting of an iterator of all the candidates to scan and a NextScanToken. + The iterator returns a tuple for each iteration consisting of the candidate Image, the abort + signal, and the number of remaining candidates. If the iterator returned is None, there are + no candidates to process. 
+ """ + pass diff --git a/workers/securityworker/models_pre_oci.py b/workers/securityworker/models_pre_oci.py new file mode 100644 index 000000000..a665cac17 --- /dev/null +++ b/workers/securityworker/models_pre_oci.py @@ -0,0 +1,52 @@ +from math import log10 + +from app import app +from data.model.image import (get_images_eligible_for_scan, get_image_pk_field, + get_max_id_for_sec_scan, get_min_id_for_sec_scan) +from util.migrate.allocator import yield_random_entries + +from workers.securityworker.models_interface import ( + ScanToken, + SecurityWorkerDataInterface +) + +class PreOCIModel(SecurityWorkerDataInterface): + def candidates_to_scan(self, target_version, start_token=None): + def batch_query(): + return get_images_eligible_for_scan(target_version) + + # Find the minimum ID. + min_id = None + if start_token is not None: + min_id = start_token.min_id + else: + min_id = app.config.get('SECURITY_SCANNER_INDEXING_MIN_ID') + if min_id is None: + min_id = get_min_id_for_sec_scan(target_version) + + # Get the ID of the last image we can analyze. Will be None if there are no images in the + # database. + max_id = get_max_id_for_sec_scan() + if max_id is None: + return (None, None) + + if min_id is None or min_id > max_id: + return (None, None) + + # 4^log10(total) gives us a scalable batch size into the billions. + batch_size = int(4 ** log10(max(10, max_id - min_id))) + + # TODO: Once we have a clean shared NamedTuple for Images, send that to the secscan analyzer + # rather than the database Image itself. + iterator = yield_random_entries( + batch_query, + get_image_pk_field(), + batch_size, + max_id, + min_id, + ) + + return (iterator, ScanToken(max_id + 1)) + + +pre_oci_model = PreOCIModel() diff --git a/workers/securityworker/securityworker.py b/workers/securityworker/securityworker.py new file mode 100644 index 000000000..732631e3a --- /dev/null +++ b/workers/securityworker/securityworker.py @@ -0,0 +1,47 @@ +import logging.config +import time + +import features + +from app import app, secscan_api +from workers.worker import Worker +from workers.securityworker import index_images +from util.secscan.api import SecurityConfigValidator +from util.secscan.analyzer import LayerAnalyzer +from util.log import logfile_path +from endpoints.v2 import v2_bp + +logger = logging.getLogger(__name__) + +DEFAULT_INDEXING_INTERVAL = 30 + +class SecurityWorker(Worker): + def __init__(self): + super(SecurityWorker, self).__init__() + validator = SecurityConfigValidator(app.config) + if not validator.valid(): + logger.warning('Failed to validate security scan configuration') + return + + self._target_version = app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 3) + self._analyzer = LayerAnalyzer(app.config, secscan_api) + self._next_token = None + + interval = app.config.get('SECURITY_SCANNER_INDEXING_INTERVAL', DEFAULT_INDEXING_INTERVAL) + self.add_operation(self._index_images, interval) + + def _index_images(self): + self._next_token = index_images(self._target_version, self._analyzer, self._next_token) + + +if __name__ == '__main__': + app.register_blueprint(v2_bp, url_prefix='/v2') + + if not features.SECURITY_SCANNER: + logger.debug('Security scanner disabled; skipping SecurityWorker') + while True: + time.sleep(100000) + + logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False) + worker = SecurityWorker() + worker.start() diff --git a/workers/test/test_securityworker.py b/workers/securityworker/test/test_securityworker.py similarity index 80% rename from 
workers/test/test_securityworker.py rename to workers/securityworker/test/test_securityworker.py index de3927450..4b52f8def 100644 --- a/workers/test/test_securityworker.py +++ b/workers/securityworker/test/test_securityworker.py @@ -5,5 +5,5 @@ from workers.securityworker import index_images def test_securityworker_realdb(initialized_db): mock_analyzer = Mock() - assert index_images(0, 1, mock_analyzer) is not None + assert index_images(1, mock_analyzer) is not None mock_analyzer.analyze_recursively.assert_called() From 1d2640e012243b36a3dbd2ad391c906588335d5b Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Wed, 28 Jun 2017 13:40:04 -0400 Subject: [PATCH 19/22] util.secscan.fake: add test for unexpected status --- test/test_secscan.py | 23 ++++++++++++++++++++++- util/secscan/fake.py | 14 ++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/test/test_secscan.py b/test/test_secscan.py index db466689a..95e74d53f 100644 --- a/test/test_secscan.py +++ b/test/test_secscan.py @@ -160,7 +160,7 @@ class TestSecurityScanner(unittest.TestCase): security_scanner.set_internal_error_layer_id(security_scanner.layer_id(layer)) analyzer = LayerAnalyzer(app.config, self.api) - with self.assertRaises(APIRequestFailure) as ctx: + with self.assertRaises(APIRequestFailure): analyzer.analyze_recursively(layer) layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest') @@ -185,6 +185,27 @@ class TestSecurityScanner(unittest.TestCase): layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest') self.assertAnalyzed(layer, security_scanner, False, 1) + def test_analyze_layer_unexpected_status(self): + """ Tests that a response from a scanner with an unexpected status code fails correctly. """ + + layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest', include_storage=True) + self.assertFalse(layer.security_indexed) + self.assertEquals(-1, layer.security_indexed_engine) + + with fake_security_scanner() as security_scanner: + # Make is so trying to analyze the parent will fail with an error. + security_scanner.set_unexpected_status_layer_id(security_scanner.layer_id(layer.parent)) + + # Try to the layer and its parents, but with one request causing an error. + analyzer = LayerAnalyzer(app.config, self.api) + with self.assertRaises(APIRequestFailure): + analyzer.analyze_recursively(layer) + + # Make sure it isn't analyzed. + layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, 'latest') + self.assertAnalyzed(layer, security_scanner, False, -1) + + def test_analyze_layer_missing_parent_handled(self): """ Tests that a missing parent causes an automatic reanalysis, which succeeds. """ diff --git a/util/secscan/fake.py b/util/secscan/fake.py index 849d0c2ca..69fc30cc2 100644 --- a/util/secscan/fake.py +++ b/util/secscan/fake.py @@ -33,6 +33,7 @@ class FakeSecurityScanner(object): self.fail_layer_id = None self.internal_error_layer_id = None self.error_layer_id = None + self.unexpected_status_layer_id = None def set_ok_layer_id(self, ok_layer_id): """ Sets a layer ID that, if encountered when the analyze call is made, causes a 200 @@ -58,6 +59,12 @@ class FakeSecurityScanner(object): """ self.error_layer_id = error_layer_id + def set_unexpected_status_layer_id(self, layer_id): + """ Sets a layer ID that, if encountered when the analyze call is made, causes an HTTP 600 + to be raised. This is useful in testing the robustness of the to unknown status codes. 
+ """ + self.unexpected_status_layer_id = layer_id + def has_layer(self, layer_id): """ Returns true if the layer with the given ID has been analyzed. """ return layer_id in self.layers @@ -252,6 +259,13 @@ class FakeSecurityScanner(object): 'content': json.dumps({'Error': {'Message': 'Some sort of error'}}), } + if layer['Name'] == self.unexpected_status_layer_id: + return { + 'status_code': 600, + 'content': json.dumps({'Error': {'Message': 'Some sort of error'}}), + } + + parent_id = layer.get('ParentName', None) parent_layer = None From 27ed3bedcca92419f580bf9909bf9b4e53627eae Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Thu, 29 Jun 2017 09:43:04 +0300 Subject: [PATCH 20/22] yapf --- workers/securityworker/__init__.py | 1 + workers/securityworker/models_interface.py | 2 ++ workers/securityworker/models_pre_oci.py | 11 ++++------- workers/securityworker/securityworker.py | 1 + workers/securityworker/test/test_securityworker.py | 1 + 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/workers/securityworker/__init__.py b/workers/securityworker/__init__.py index a101f6795..8c2bc44a7 100644 --- a/workers/securityworker/__init__.py +++ b/workers/securityworker/__init__.py @@ -10,6 +10,7 @@ logger = logging.getLogger(__name__) unscanned_images_gauge = prometheus.create_gauge('unscanned_images', 'Number of images that clair needs to scan.') + def index_images(target_version, analyzer, token=None): """ Performs security indexing of all images in the database not scanned at the target version. If a token is provided, scanning will begin where the token indicates it previously completed. diff --git a/workers/securityworker/models_interface.py b/workers/securityworker/models_interface.py index 6d872ca4e..76295a427 100644 --- a/workers/securityworker/models_interface.py +++ b/workers/securityworker/models_interface.py @@ -3,6 +3,7 @@ from collections import namedtuple from six import add_metaclass + class ScanToken(namedtuple('NextScanToken', ['min_id'])): """ ScanToken represents an opaque token that can be passed between runs of the security worker @@ -11,6 +12,7 @@ class ScanToken(namedtuple('NextScanToken', ['min_id'])): the token in any way. """ + @add_metaclass(ABCMeta) class SecurityWorkerDataInterface(object): """ diff --git a/workers/securityworker/models_pre_oci.py b/workers/securityworker/models_pre_oci.py index a665cac17..0115be908 100644 --- a/workers/securityworker/models_pre_oci.py +++ b/workers/securityworker/models_pre_oci.py @@ -5,10 +5,8 @@ from data.model.image import (get_images_eligible_for_scan, get_image_pk_field, get_max_id_for_sec_scan, get_min_id_for_sec_scan) from util.migrate.allocator import yield_random_entries -from workers.securityworker.models_interface import ( - ScanToken, - SecurityWorkerDataInterface -) +from workers.securityworker.models_interface import (ScanToken, SecurityWorkerDataInterface) + class PreOCIModel(SecurityWorkerDataInterface): def candidates_to_scan(self, target_version, start_token=None): @@ -34,7 +32,7 @@ class PreOCIModel(SecurityWorkerDataInterface): return (None, None) # 4^log10(total) gives us a scalable batch size into the billions. - batch_size = int(4 ** log10(max(10, max_id - min_id))) + batch_size = int(4**log10(max(10, max_id - min_id))) # TODO: Once we have a clean shared NamedTuple for Images, send that to the secscan analyzer # rather than the database Image itself. 
@@ -43,8 +41,7 @@ class PreOCIModel(SecurityWorkerDataInterface): get_image_pk_field(), batch_size, max_id, - min_id, - ) + min_id,) return (iterator, ScanToken(max_id + 1)) diff --git a/workers/securityworker/securityworker.py b/workers/securityworker/securityworker.py index 732631e3a..1038e7a04 100644 --- a/workers/securityworker/securityworker.py +++ b/workers/securityworker/securityworker.py @@ -15,6 +15,7 @@ logger = logging.getLogger(__name__) DEFAULT_INDEXING_INTERVAL = 30 + class SecurityWorker(Worker): def __init__(self): super(SecurityWorker, self).__init__() diff --git a/workers/securityworker/test/test_securityworker.py b/workers/securityworker/test/test_securityworker.py index 4b52f8def..dfa9ff490 100644 --- a/workers/securityworker/test/test_securityworker.py +++ b/workers/securityworker/test/test_securityworker.py @@ -3,6 +3,7 @@ from mock import patch, Mock from test.fixtures import * from workers.securityworker import index_images + def test_securityworker_realdb(initialized_db): mock_analyzer = Mock() assert index_images(1, mock_analyzer) is not None From d07cc91dc64df9c77e31788e8fbb3a23e4bfc4d6 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Thu, 29 Jun 2017 09:57:39 +0300 Subject: [PATCH 21/22] yapf --- endpoints/verbs/__init__.py | 49 +++++++++++++----------- endpoints/verbs/models_interface.py | 16 ++++++-- endpoints/verbs/models_pre_oci.py | 39 ++++++++----------- endpoints/verbs/test/test_security.py | 55 +++++++++++++++------------ 4 files changed, 84 insertions(+), 75 deletions(-) diff --git a/endpoints/verbs/__init__.py b/endpoints/verbs/__init__.py index 76ddc0498..93f863989 100644 --- a/endpoints/verbs/__init__.py +++ b/endpoints/verbs/__init__.py @@ -22,16 +22,14 @@ from util.http import exact_abort from util.registry.filelike import wrap_with_handler from util.registry.queuefile import QueueFile from util.registry.queueprocess import QueueProcess -from util.registry.torrent import (make_torrent, per_user_torrent_filename, public_torrent_filename, - PieceHasher) - +from util.registry.torrent import ( + make_torrent, per_user_torrent_filename, public_torrent_filename, PieceHasher) logger = logging.getLogger(__name__) verbs = Blueprint('verbs', __name__) license_validator.enforce_license_before_request(verbs) - LAYER_MIMETYPE = 'binary/octet-stream' @@ -60,7 +58,8 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers): logger.debug('Returning image layer %s: %s', current_image.image_id, current_image_path) yield current_image_stream - stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image, get_next_layer) + stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image, + get_next_layer) for handler_fn in handlers: stream = wrap_with_handler(stream, handler_fn) @@ -89,6 +88,7 @@ def _write_derived_image_to_storage(verb, derived_image, queue_file): """ Read from the generated stream and write it back to the storage engine. This method runs in a separate process. 
""" + def handle_exception(ex): logger.debug('Exception when building %s derived image %s: %s', verb, derived_image.ref, ex) @@ -139,8 +139,9 @@ def _torrent_for_blob(blob, is_public): torrent_file = make_torrent(name, webseed, blob.size, torrent_info.piece_length, torrent_info.pieces) - headers = {'Content-Type': 'application/x-bittorrent', - 'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)} + headers = { + 'Content-Type': 'application/x-bittorrent', + 'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)} return make_response(torrent_file, 200, headers) @@ -158,8 +159,7 @@ def _torrent_repo_verb(repo_image, tag, verb, **kwargs): abort(406) # Return the torrent. - repo = model.get_repository(repo_image.repository.namespace_name, - repo_image.repository.name) + repo = model.get_repository(repo_image.repository.namespace_name, repo_image.repository.name) repo_is_public = repo is not None and repo.is_public torrent = _torrent_for_blob(derived_image.blob, repo_is_public) @@ -229,15 +229,14 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker= metric_queue.repository_pull.Inc(labelvalues=[namespace, repository, verb, True]) # Lookup/create the derived image for the verb and repo image. - derived_image = model.lookup_or_create_derived_image(repo_image, verb, - storage.preferred_locations[0], - varying_metadata={'tag': tag}) + derived_image = model.lookup_or_create_derived_image( + repo_image, verb, storage.preferred_locations[0], varying_metadata={'tag': tag}) if not derived_image.blob.uploading: logger.debug('Derived %s image %s exists in storage', verb, derived_image.ref) derived_layer_path = model.get_blob_path(derived_image.blob) is_head_request = request.method == 'HEAD' - download_url = storage.get_direct_download_url(derived_image.blob.locations, derived_layer_path, - head=is_head_request) + download_url = storage.get_direct_download_url(derived_image.blob.locations, + derived_layer_path, head=is_head_request) if download_url: logger.debug('Redirecting to download URL for derived %s image %s', verb, derived_image.ref) return redirect(download_url) @@ -246,8 +245,9 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker= database.close_db_filter(None) logger.debug('Sending cached derived %s image %s', verb, derived_image.ref) - return send_file(storage.stream_read_file(derived_image.blob.locations, derived_layer_path), - mimetype=LAYER_MIMETYPE) + return send_file( + storage.stream_read_file(derived_image.blob.locations, derived_layer_path), + mimetype=LAYER_MIMETYPE) logger.debug('Building and returning derived %s image %s', verb, derived_image.ref) @@ -270,9 +270,12 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker= # and send the results to the client and storage. 
handlers = [hasher.update] args = (formatter, repo_image, tag, derived_image_id, handlers) - queue_process = QueueProcess(_open_stream, - 8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max - args, finished=_store_metadata_and_cleanup) + queue_process = QueueProcess( + _open_stream, + 8 * 1024, + 10 * 1024 * 1024, # 8K/10M chunk/max + args, + finished=_store_metadata_and_cleanup) client_queue_file = QueueFile(queue_process.create_queue(), 'client') storage_queue_file = QueueFile(queue_process.create_queue(), 'storage') @@ -336,11 +339,13 @@ def get_aci_signature(server, namespace, repository, tag, os, arch): @route_show_if(features.ACI_CONVERSION) @anon_protect -@verbs.route('/aci/////aci///', methods=['GET', 'HEAD']) +@verbs.route('/aci/////aci///', methods=[ + 'GET', 'HEAD']) @process_auth def get_aci_image(server, namespace, repository, tag, os, arch): - return _repo_verb(namespace, repository, tag, 'aci', AppCImageFormatter(), - sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch) + return _repo_verb(namespace, repository, tag, 'aci', + AppCImageFormatter(), sign=True, checker=os_arch_checker(os, arch), os=os, + arch=arch) @anon_protect diff --git a/endpoints/verbs/models_interface.py b/endpoints/verbs/models_interface.py index 868b0e76f..0bb8fccac 100644 --- a/endpoints/verbs/models_interface.py +++ b/endpoints/verbs/models_interface.py @@ -3,8 +3,10 @@ from collections import namedtuple from six import add_metaclass -class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', - 'is_public', 'kind'])): + +class Repository( + namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', 'is_public', + 'kind'])): """ Repository represents a namespaced collection of tags. :type id: int @@ -21,22 +23,27 @@ class DerivedImage(namedtuple('DerivedImage', ['ref', 'blob', 'internal_source_i DerivedImage represents a user-facing alias for an image which was derived from another image. """ + class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])): """ RepositoryReference represents a reference to a Repository, without its full metadata. """ -class ImageWithBlob(namedtuple('Image', ['image_id', 'blob', 'compat_metadata', 'repository', - 'internal_db_id', 'v1_metadata'])): + +class ImageWithBlob( + namedtuple('Image', [ + 'image_id', 'blob', 'compat_metadata', 'repository', 'internal_db_id', 'v1_metadata'])): """ ImageWithBlob represents a user-facing alias for referencing an image, along with its blob. """ + class Blob(namedtuple('Blob', ['uuid', 'size', 'uncompressed_size', 'uploading', 'locations'])): """ Blob represents an opaque binary blob saved to the storage system. """ + class TorrentInfo(namedtuple('TorrentInfo', ['piece_length', 'pieces'])): """ TorrentInfo represents the torrent piece information associated with a blob. @@ -49,6 +56,7 @@ class VerbsDataInterface(object): Interface that represents all data store interactions required by the registry's custom HTTP verbs. 
""" + @abstractmethod def get_repository(self, namespace_name, repo_name): """ diff --git a/endpoints/verbs/models_pre_oci.py b/endpoints/verbs/models_pre_oci.py index 728e1f2ed..26a955603 100644 --- a/endpoints/verbs/models_pre_oci.py +++ b/endpoints/verbs/models_pre_oci.py @@ -10,8 +10,8 @@ from endpoints.verbs.models_interface import ( Repository, RepositoryReference, TorrentInfo, - VerbsDataInterface, -) + VerbsDataInterface,) + class PreOCIModel(VerbsDataInterface): """ @@ -27,13 +27,11 @@ class PreOCIModel(VerbsDataInterface): return _repository_for_repo(repo) def get_manifest_layers_with_blobs(self, repo_image): - repo_image_record = model.image.get_image_by_id(repo_image.repository.namespace_name, - repo_image.repository.name, - repo_image.image_id) + repo_image_record = model.image.get_image_by_id( + repo_image.repository.namespace_name, repo_image.repository.name, repo_image.image_id) - parents = model.image.get_parent_images_with_placements(repo_image.repository.namespace_name, - repo_image.repository.name, - repo_image_record) + parents = model.image.get_parent_images_with_placements( + repo_image.repository.namespace_name, repo_image.repository.name, repo_image_record) yield repo_image @@ -51,8 +49,7 @@ class PreOCIModel(VerbsDataInterface): compat_metadata=metadata, v1_metadata=_docker_v1_metadata(repo_image.repository.namespace_name, repo_image.repository.name, parent), - internal_db_id=parent.id, - ) + internal_db_id=parent.id,) def get_derived_image_signature(self, derived_image, signer_name): storage = model.storage.get_storage_by_uuid(derived_image.blob.uuid) @@ -100,8 +97,7 @@ class PreOCIModel(VerbsDataInterface): return TorrentInfo( pieces=torrent_info.pieces, - piece_length=torrent_info.piece_length, - ) + piece_length=torrent_info.piece_length,) def set_torrent_info(self, blob, piece_length, pieces): blob_record = model.storage.get_storage_by_uuid(blob.uuid) @@ -138,12 +134,10 @@ class PreOCIModel(VerbsDataInterface): repository=RepositoryReference( namespace_name=namespace_name, name=repo_name, - id=found.repository_id, - ), + id=found.repository_id,), compat_metadata=metadata, v1_metadata=_docker_v1_metadata(namespace_name, repo_name, found), - internal_db_id=found.id, - ) + internal_db_id=found.id,) pre_oci_model = PreOCIModel() @@ -168,8 +162,7 @@ def _docker_v1_metadata(namespace_name, repo_name, repo_image): # Note: These are not needed in verbs and are expensive to load, so we just skip them. content_checksum=None, - parent_image_id=None, - ) + parent_image_id=None,) def _derived_image(blob_record, repo_image): @@ -179,8 +172,7 @@ def _derived_image(blob_record, repo_image): return DerivedImage( ref=repo_image.internal_db_id, blob=_blob(blob_record), - internal_source_image_db_id=repo_image.internal_db_id, - ) + internal_source_image_db_id=repo_image.internal_db_id,) def _blob(blob_record): @@ -197,8 +189,8 @@ def _blob(blob_record): size=blob_record.image_size, uncompressed_size=blob_record.uncompressed_size, uploading=blob_record.uploading, - locations=locations, - ) + locations=locations,) + def _repository_for_repo(repo): """ Returns a Repository object representing the Pre-OCI data model repo instance given. 
""" @@ -208,5 +200,4 @@ def _repository_for_repo(repo): namespace_name=repo.namespace_user.username, description=repo.description, is_public=model.repository.is_repository_public(repo), - kind=model.repository.get_repo_kind_name(repo), - ) + kind=model.repository.get_repo_kind_name(repo),) diff --git a/endpoints/verbs/test/test_security.py b/endpoints/verbs/test/test_security.py index 5e53c68a8..eeb79c567 100644 --- a/endpoints/verbs/test/test_security.py +++ b/endpoints/verbs/test/test_security.py @@ -18,40 +18,45 @@ ACI_ARGS = { 'server': 'someserver', 'tag': 'fake', 'os': 'linux', - 'arch': 'x64', -} + 'arch': 'x64',} + @pytest.mark.parametrize('user', [ (0, None), (1, NO_ACCESS_USER), (2, READ_ACCESS_USER), (3, CREATOR_ACCESS_USER), - (4, ADMIN_ACCESS_USER), -]) -@pytest.mark.parametrize('endpoint,method,repository,single_repo_path,params,expected_statuses', [ - ('get_aci_signature', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), - ('get_aci_signature', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), - ('get_aci_signature', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), - ('get_aci_signature', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), + (4, ADMIN_ACCESS_USER),]) +@pytest.mark.parametrize( + 'endpoint,method,repository,single_repo_path,params,expected_statuses', + [ + ('get_aci_signature', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), + ('get_aci_signature', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_signature', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_signature', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), - # get_aci_image - ('get_aci_image', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), - ('get_aci_image', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), - ('get_aci_image', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), - ('get_aci_image', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), + # get_aci_image + ('get_aci_image', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)), + ('get_aci_image', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_image', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)), + ('get_aci_image', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)), - # get_squashed_tag - ('get_squashed_tag', 'GET', PUBLIC_REPO, False, dict(tag='fake'), (404, 404, 404, 404, 404)), - ('get_squashed_tag', 'GET', PRIVATE_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), - ('get_squashed_tag', 'GET', ORG_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), - ('get_squashed_tag', 'GET', ANOTHER_ORG_REPO, False, dict(tag='fake'), (403, 403, 403, 403, 404)), + # get_squashed_tag + ('get_squashed_tag', 'GET', PUBLIC_REPO, False, dict(tag='fake'), (404, 404, 404, 404, 404)), + ('get_squashed_tag', 'GET', PRIVATE_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), + ('get_squashed_tag', 'GET', ORG_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)), + ('get_squashed_tag', 'GET', ANOTHER_ORG_REPO, False, dict(tag='fake'), (403, 403, 403, 403, + 404)), - # get_tag_torrent - ('get_tag_torrent', 'GET', PUBLIC_REPO, True, dict(digest='sha256:1234'), (404, 404, 404, 404, 404)), - ('get_tag_torrent', 'GET', PRIVATE_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403, 404)), - ('get_tag_torrent', 'GET', ORG_REPO, True, 
dict(digest='sha256:1234'), (403, 403, 404, 403, 404)), - ('get_tag_torrent', 'GET', ANOTHER_ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 403, 403, 404)), -]) + # get_tag_torrent + ('get_tag_torrent', 'GET', PUBLIC_REPO, True, dict(digest='sha256:1234'), (404, 404, 404, 404, + 404)), + ('get_tag_torrent', 'GET', PRIVATE_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403, + 404)), + ('get_tag_torrent', 'GET', ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403, + 404)), + ('get_tag_torrent', 'GET', ANOTHER_ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 403, + 403, 404)),]) def test_verbs_security(user, endpoint, method, repository, single_repo_path, params, expected_statuses, app, client): headers = {} From fcaf309ce69de100f61bd6a3ed2b1469077ce7d9 Mon Sep 17 00:00:00 2001 From: Joseph Schorr Date: Thu, 29 Jun 2017 10:50:59 +0300 Subject: [PATCH 22/22] Fix file I forgot to update --- image/appc/test/test_appc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/image/appc/test/test_appc.py b/image/appc/test/test_appc.py index de829b4e8..06f1e8a8d 100644 --- a/image/appc/test/test_appc.py +++ b/image/appc/test/test_appc.py @@ -1,7 +1,7 @@ import pytest from image.appc import DockerV1ToACIManifestTranslator -from data.interfaces.verbs import RepositoryReference, ImageWithBlob +from endpoints.verbs.models_interface import RepositoryReference, ImageWithBlob from util.dict_wrappers import JSONPathDict
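
After patches 17, 18 and 20, index_images() takes the target engine version, a LayerAnalyzer and an opaque resume token, and returns the token to pass into the next run; SecurityWorker._index_images simply stores whatever comes back. The snippet below is a minimal, illustrative sketch of that call pattern, using only the names introduced in the patches above; it assumes an initialized app/database context (as the worker and its test have), and is not itself part of the series:

    from app import app, secscan_api
    from util.secscan.analyzer import LayerAnalyzer
    from workers.securityworker import index_images

    # Mirror SecurityWorker.__init__: pick the target engine version and build the analyzer.
    target_version = app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 3)
    analyzer = LayerAnalyzer(app.config, secscan_api)

    # Each call scans one randomized batch of eligible images; the returned ScanToken
    # (or None) is fed back in on the next invocation, much as SecurityWorker._index_images
    # does each time its timed operation fires.
    token = None
    token = index_images(target_version, analyzer, token=token)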