Merge branch 'prerender'

commit 3f4d49213c

14 changed files with 179 additions and 6 deletions

.gitignore (vendored) | 1

@@ -1,3 +1,4 @@
*.pyc
venv
.elasticbeanstalk/
static/snapshots/

README.md | 20

@@ -1,4 +1,24 @@
to prepare a new host:

```
sudo apt-add-repository -y ppa:nginx/stable
sudo apt-get update
sudo apt-get install -y git python-virtualenv python-dev phantomjs
sudo apt-get install -y nginx-full
```

check out the code:

```
git clone https://bitbucket.org/yackob03/quay.git
virtualenv --distribute venv
source venv/bin/activate
pip install -r requirements.txt
```

running:

```
sudo nginx -c `pwd`/nginx.conf
STACK=prod gunicorn -D --workers 4 -b unix:/tmp/gunicorn.sock --worker-class eventlet -t 500 application:application
```

@@ -43,6 +43,10 @@ http {
    ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
    ssl_prefer_server_ciphers on;

    if ($args ~ "_escaped_fragment_") {
        rewrite ^ /static/snapshots$uri/index.html;
    }

    location /static/ {
        # checks for static file, if not found proxy to app
        alias /home/ubuntu/quay/static/;

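This nginx change is the serving side of the prerender setup: when a request carries a `_escaped_fragment_` query argument (the convention crawlers use for AJAX applications), the request is rewritten to the pre-rendered snapshot produced by the scripts under seo-snapshots/ instead of the JavaScript application. A rough sketch of the resulting mapping, assuming the host layout from the README above (the exact URL and paths are illustrative):

```
# a crawler fetches the escaped-fragment form of the plans page
curl -k 'https://localhost/plans/?_escaped_fragment_='

# the rewrite above maps this to roughly
#   /static/snapshots/plans/index.html
# which the /static/ location serves from
#   /home/ubuntu/quay/static/snapshots/plans/index.html
```
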
@@ -10,4 +10,5 @@ pymysql
stripe
gunicorn
eventlet
mixpanel-py
mixpanel-py
beautifulsoup4

@@ -7,6 +7,7 @@ MarkupSafe==0.18
PyMySQL==0.5
Werkzeug==0.9.4
argparse==1.2.1
beautifulsoup4==4.3.2
blinker==1.3
boto==2.13.3
distribute==0.6.34

seo-snapshots/README.md (new file) | 13

@@ -0,0 +1,13 @@
Follow the instructions to set up a host of the whole project before attempting to run.

to run once:

```
python make_snapshot.py
```

cron line to update every 30 minutes:

```
0,30 * * * * cd /home/ubuntu/quay/seo-snapshots && ../venv/bin/python make_snapshot.py
```

seo-snapshots/crawl.py (new file) | 60

@@ -0,0 +1,60 @@
import subprocess
import urllib
import os
import logging
import codecs

from bs4 import BeautifulSoup
from Queue import Queue


logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)


BASE_URL = 'http://localhost:5000'
OUTPUT_PATH = 'snapshots/'

aware_of = set()
crawl_queue = Queue()

def crawl_url(url):
  final_url = BASE_URL + url
  to_write = OUTPUT_PATH + url + 'index.html'

  logger.info('Snapshotting url: %s -> %s' % (final_url, to_write))

  out_html = subprocess.check_output(['phantomjs', '--ignore-ssl-errors=yes',
                                      'phantomjs-runner.js', final_url])

  # Remove script tags
  soup = BeautifulSoup(out_html)
  to_extract = soup.findAll('script')
  for item in to_extract:
    item.extract()

  # Find all links and add them to the crawl queue
  for link in soup.findAll('a'):
    to_add = link.get('href')

    if to_add not in aware_of and to_add.startswith('/'):
      logger.info('Adding link to be crawled: %s' % to_add)
      crawl_queue.put(to_add)
      aware_of.add(to_add)

  to_write_dir = os.path.dirname(to_write)

  if not os.path.exists(to_write_dir):
    os.makedirs(to_write_dir)

  with codecs.open(to_write, 'w', 'utf-8') as output_file:
    output_file.write(soup.prettify())

# Seed the crawler
crawl_queue.put('/')
aware_of.add('/')

# Crawl
while not crawl_queue.empty():
  to_crawl = crawl_queue.get()
  crawl_url(to_crawl)

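crawl.py is a small breadth-first variant of the snapshotter: it seeds a queue with `/`, renders each page through phantomjs-runner.js, strips `<script>` tags, writes the result under a local snapshots/ directory, and queues every same-site link it discovers. It targets the dev server on port 5000, so a plausible invocation (assuming the virtualenv layout from the main README; the commit itself only documents make_snapshot.py) would be:

```
# from the seo-snapshots directory, with the dev server listening on :5000
cd seo-snapshots
../venv/bin/python crawl.py
```
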
seo-snapshots/make_snapshot.py (new file) | 45

@@ -0,0 +1,45 @@
import subprocess
import urllib
import os
import logging
import codecs

from bs4 import BeautifulSoup


logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)


BASE_URL = 'https://localhost/'
OUTPUT_PATH = '../static/snapshots/'

URLS = [
  '',
  'guide/',
  'plans/',
  'repository/',
]

for url in URLS:
  final_url = BASE_URL + url
  to_write = OUTPUT_PATH + url + 'index.html'

  logger.info('Snapshotting url: %s -> %s' % (final_url, to_write))

  out_html = subprocess.check_output(['phantomjs', '--ignore-ssl-errors=yes',
                                      'phantomjs-runner.js', final_url])

  # Remove script tags
  soup = BeautifulSoup(out_html)
  to_extract = soup.findAll('script')
  for item in to_extract:
    item.extract()

  to_write_dir = os.path.dirname(to_write)

  if not os.path.exists(to_write_dir):
    os.makedirs(to_write_dir)

  with codecs.open(to_write, 'w', 'utf-8') as output_file:
    output_file.write(soup.prettify())

seo-snapshots/phantomjs-runner.js (new file) | 23

@@ -0,0 +1,23 @@
var system = require('system');
var url = system.args[1] || '';
if(url.length > 0) {
  var page = require('webpage').create();
  page.open(url, function (status) {
    if (status == 'success') {
      var delay, checker = (function() {
        var html = page.evaluate(function () {
          var ready = document.getElementsByClassName('ready-indicator')[0];
          if(ready.getAttribute('data-status') == 'ready') {
            return document.getElementsByTagName('html')[0].outerHTML;
          }
        });
        if(html) {
          clearTimeout(delay);
          console.log(html);
          phantom.exit();
        }
      });
      delay = setInterval(checker, 100);
    }
  });
}

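The runner loads the given URL, polls every 100 ms for an element with class `ready-indicator` whose `data-status` attribute is `ready` (set by the Angular controllers below once their data has loaded), then prints the fully rendered HTML and exits. Both snapshot scripts invoke it via subprocess; an equivalent manual invocation, assuming PhantomJS is on the PATH as installed in the README, would be roughly:

```
# render a single page and capture the output, as make_snapshot.py does
phantomjs --ignore-ssl-errors=yes phantomjs-runner.js https://localhost/ > index.html
```
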
@@ -91,9 +91,12 @@ function PlansCtrl($scope, UserService, PlanService) {
      $('#signinModal').modal({});
    }
  };

  $scope.status = 'ready';
}

function GuideCtrl($scope, Restangular) {
function GuideCtrl($scope) {
  $scope.status = 'ready';
}

function RepoListCtrl($scope, Restangular, UserService) {

@@ -194,6 +197,8 @@ function LandingCtrl($scope, $timeout, Restangular, UserService, KeyService) {
      $scope.loadingmyrepos = false;
    });
  };

  $scope.status = 'ready';
}

function RepoCtrl($scope, Restangular, $routeParams, $rootScope) {

@@ -1,4 +1,4 @@
<div class="container">
<div class="container ready-indicator" data-status="{{ status }}">
  <div class="alert alert-warning">Warning: Quay requires docker version 0.6.2 or higher to work</div>

  <h2>Getting started guide</h2>

@@ -1,4 +1,4 @@
<div class="jumbotron landing">
<div class="jumbotron landing ready-indicator" data-status="{{ status }}">
  <div class="container">
    <div class="row messages">
      <div class="col-md-7">

@@ -1,4 +1,4 @@
<div class="container plans">
<div class="container plans ready-indicator" data-status="{{ status }}">
  <div class="callout">
    Plans & Pricing
  </div>

@@ -2,7 +2,7 @@
  <i class="icon-spinner icon-spin icon-3x"></i>
</div>

<div class="container" ng-show="!loading">
<div class="container ready-indicator" ng-show="!loading" data-status="{{ loading ? '' : 'ready' }}">
  <div class="repo-list" ng-show="!user.anonymous">
    <h3>Your Repositories</h3>
    <div ng-show="private_repositories.length > 0">

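Taken together: each controller sets `$scope.status = 'ready'` once it has finished loading, the templates expose that flag through a `ready-indicator` element via `data-status`, and the PhantomJS runner waits for that attribute before capturing the page. A quick way to confirm a snapshot was captured after Angular finished rendering (a hypothetical check, not part of the commit; paths are relative to the repository root) is to look for the marker in the generated file:

```
# the rendered snapshot should carry the ready marker and contain no <script> tags
grep 'data-status="ready"' static/snapshots/index.html
grep -c '<script' static/snapshots/index.html   # expect 0
```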