mirror of https://github.com/vbatts/sl-feeds.git
synced 2024-11-21 15:25:41 +00:00

Commit 8e97e3d16f (parent f522293398)

changelog: a slackware ChangeLog parser and ability to export to feeds

Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>

14 changed files with 1750 additions and 535 deletions
LICENSE (new file, 28 lines added)

@@ -0,0 +1,28 @@
Copyright (c) 2016 Vincent Batts, Raleigh, NC, USA
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

README (deleted file, 15 lines removed)

@@ -1,15 +0,0 @@
hack scripts

They accomplish going from ChangeLog.txt -> RSS feeds that folks can subscribed to.
Ultimately ending up at http://www.slackware.com/~vbatts/feeds/

These are a mess, but still work.

I wrote a utility called slack-utils (https://github.com/vbatts/slack-utils/)
with a ruby gem (https://rubygems.org/gems/slack-utils). The python script
(`./bin/changelog_http_poll.py`) is called by a crontab. Python walks the
changelog.txt, then fetches from the http mirror the modified time. If it http
has the new version, then run the ruby script (`./bin/gen_changlog_rss.rb`).
This is what parses the changelog and returns RSS. Which is then written to the
corresponding file for public consumption.

README.md (new file, 18 lines added)

@@ -0,0 +1,18 @@
# sl-feeds

This is for processing Slackware ChangeLog.txt -> RSS feeds that folks can
subscribe to.

Ultimately ending up at http://www.slackware.com/~vbatts/feeds/

## Usage

```bash
go get github.com/vbatts/sl-feeds
```

crontab like:

```
0 */2 * * * ~/bin/sl-feeds -q || echo "$(date): failed to poll changelogs" | mail -s "[slackrss] changelog_http_poll failed $(date +%D)" me@example.com
```

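Beyond the `go get` and crontab usage above, the new `changelog` package added in this commit can also be driven from Go directly: `changelog.Parse` reads a ChangeLog.txt from any io.Reader, `changelog.ToFeed` turns the entries into a gorilla/feeds feed, and the feed is then rendered with `WriteRss` (Atom is also possible), exactly as the new tests do. Below is a minimal sketch of that flow; the import path `github.com/vbatts/sl-feeds/changelog` and the local `ChangeLog.txt` path are assumptions for illustration, and only the function names come from this commit.

```go
// Hedged sketch (not part of this commit): parse a downloaded ChangeLog.txt
// and emit an RSS feed, mirroring what changelog/feeds_test.go exercises.
package main

import (
	"log"
	"os"

	"github.com/vbatts/sl-feeds/changelog" // assumed import path for the new package
)

func main() {
	// Assumes a ChangeLog.txt was already fetched to the current directory.
	fh, err := os.Open("ChangeLog.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer fh.Close()

	entries, err := changelog.Parse(fh)
	if err != nil {
		log.Fatal(err)
	}

	feed, err := changelog.ToFeed("http://slackware.osuosl.org/slackware64-current/ChangeLog.txt", entries)
	if err != nil {
		log.Fatal(err)
	}

	// gorilla/feeds renders the feed as RSS here, as the tests in this commit do.
	if err := feed.WriteRss(os.Stdout); err != nil {
		log.Fatal(err)
	}
}
```
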
@@ -1,62 +0,0 @@
#!/home/vbatts/opt/bin/ruby

#require 'fileutils'
require 'logger'
require 'tempfile'
require 'stringio'

require 'rubygems'
require 'slackware'
require 'slackware/changelog/rss'

#include FileUtils

$LOG = Logger.new(STDERR)
$LOG.level = Logger::WARN

FEEDS_BASE_DIR = "/home/vbatts/public_html/feeds/"
#url = 'http://alphageek.dyndns.org/linux/slackware-packages/slack-13.1/ChangeLog.txt'
# Sun Feb 13 08:44:35 PST 2011
# new url
URL = 'http://alphageek.dyndns.org/mirrors/alphageek/slackware-%s/ChangeLog.txt'

VERSIONS = %w{ 14.0 14.1 }

def url(ver)
  URL % ver
end

if ARGV.include?('-v')
  $LOG.level = Logger::DEBUG
end

VERSIONS.each {|ver|
  begin
    #tmp_file = File.open("/tmp/vbatts/alpha_log-#{(rand*1000).to_i}.xxx", "w+")
    tmp_file = Tempfile.new("alpha_log")
    $LOG.debug('tmp_file') { tmp_file }

    strio = StringIO.new()
    $LOG.debug('created ') { strio }

    buffer = `lynx -source #{url(ver)}`
    $LOG.debug('buffer length') { buffer.length }

    tmp_file.write(buffer)
    tmp_file.flush

    changelog = Slackware::ChangeLog.new(tmp_file.path)
    changelog.parse
    strio.write(changelog.to_rss(
      :noimage => true,
      :title => "alphageek's #{ver} ChangeLog",
      :url => url(ver)))
  ensure
    strio.seek(0)
    tmp_file.close
  end
  feed_file = File.open(FEEDS_BASE_DIR + "alphageek-#{ver}_ChangeLog.rss", "w+")
  $LOG.debug('feed_file') { feed_file }
  feed_file.write(strio.read())
  feed_file.close
}

@@ -1,194 +0,0 @@
#!/usr/bin/env python
# Mon Oct 17 08:25:29 PDT 2011
# copyright 2011 Vincent Batts, Vienna, VA, USA

# switching from an inotify watcher, to an http poll
# since what lands on connie.slackware.com usually doesn't go public
# immediately


import os
import sys
import glob
import time
from datetime import datetime
from datetime import timedelta
from time import mktime
import urllib2
import anydbm

DEFAULT_DB = os.path.join(os.getenv('HOME'), '.slackware_changelog.db')
DEFAULT_URL = "http://slackware.osuosl.org/"
SLACKWARE_DIR_PATH = "/mirrors/ftp.slackware.com/pub/slackware"
RSS_DIR_PATH = "/home/vbatts/public_html/feeds"

'''
slackware-12.2_ChangeLog.rss
/home/vbatts/public_html/feeds/slackware-10.1_patches_ChangeLog.rss
/home/vbatts/public_html/feeds/slackware-8.1_patches_ChangeLog.rss
>>> for i in c.slackware_versions(): print i
...
/mirrors/ftp.slackware.com/pub/slackware/slackware64-13.0/ChangeLog.txt
/mirrors/ftp.slackware.com/pub/slackware/slackware-8.1/ChangeLog.txt
/mirrors/ftp.slackware.com/pub/slackware/slackware64-13.37/ChangeLog.txt
/mirrors/ftp.slackware.com/pub/slackware/slackware-13.0/ChangeLog.txt
/mirrors/ftp.slackware.com/pub/sla
'''

def rss_files():
    for item in glob.glob(RSS_DIR_PATH + "/*.rss"):
        yield item

def rss_files_format(str):
    if str.startswith(RSS_DIR_PATH + "/"):
        str = str[len(RSS_DIR_PATH + "/"):]
    if str.endswith(".rss"):
        str = str[:-4]
    str = str + '.txt'
    return str.replace('_','/')

def rss_files_cleaned():
    for i in rss_files():
        yield rss_files_format(i)

def slackware_versions():
    changes = glob.glob(SLACKWARE_DIR_PATH + "/*/ChangeLog.txt")
    patches = glob.glob(SLACKWARE_DIR_PATH + "/*/patches/ChangeLog.txt")
    for item in changes + patches:
        yield item

def slackware_versions_format(str):
    if str.startswith(SLACKWARE_DIR_PATH + "/"):
        str = str[len(SLACKWARE_DIR_PATH + "/"):]
    if str.endswith("/"):
        str = str[:-1]
    if str.startswith("/"):
        str = str[1:]
    if str.endswith(".txt"):
        str = str[:-4]
    return str.replace('/','_')

def slackware_versions_strip():
    for i in slackware_versions():
        yield i[len(SLACKWARE_DIR_PATH + "/"):]

def slackware_versions_rss():
    for i in slackware_versions():
        yield slackware_versions_format(i)

def process_changelog_rss(pathname):
    if os.path.basename(pathname) == "ChangeLog.txt":
        print "%f: proccessing %s" % (time.time(), pathname)
        # XXX REPLACE ME!!
        cmd = "/home/vbatts/opt/bin/ruby /home/vbatts/bin/gen_changlog_rss.rb %s" % pathname
        print cmd
        print os.system(cmd)
    else:
        print '[WARN] "%s" is not a ChangeLog.txt file' % pathname

def db_setup(name = DEFAULT_DB):
    try:
        return anydbm.open(name, 'c')
    except:
        return None

def db_teardown(db):
    try:
        return db.close()
    except:
        return None

def db_add_ts(db, key, val):
    if type(val) == float:
        db[key] = str(val)
    if type(val) == datetime:
        db[key] = str(unix_time(val))
    return db[key]

def db_get_ts(db, key):
    try:
        return datetime.fromtimestamp(float(db[key]))
    except KeyError:
        return None

def unix_time(dt):
    return mktime(dt.timetuple())+1e-6*dt.microsecond

def time_from_header(str):
    return datetime.strptime(str, "%a, %d %b %Y %H:%M:%S %Z")

def get_remote_header(url, header):
    try:
        req = urllib2.Request(url)
        resp = urllib2.urlopen(req)
        return resp.headers.getheader(header)
    except:
        return None

def get_remote_time_str(url):
    return get_remote_header(url,"last-modified")

def get_remote_time(url):
    time_str = get_remote_time_str(url)
    if time_str:
        return time_from_header(time_str)
    else:
        return None

def get_local_time(path):
    try:
        time_flt = os.stat(path).st_mtime
        return datetime.fromtimestamp(time_flt)
    except:
        return None

def main(args):
    try:
        db = db_setup()
        if db == None:
            print "ERROR: could not setup database at %s" % DEFAULT_DB
            return 1

        for i in slackware_versions_strip():
            # i'm not going to worry about this file, right now
            if i == 'slackware/ChangeLog.txt':
                continue

            rss_file_name = os.path.join(RSS_DIR_PATH,
                    slackware_versions_format(i) + ".rss")
            rss_ts = get_local_time(rss_file_name)
            curr_ts = get_local_time(os.path.join(SLACKWARE_DIR_PATH, i))
            prev_ts = db_get_ts( db, "local_" + i)

            # Go no further for this file
            if curr_ts == prev_ts and os.path.exists(rss_file_name) and rss_ts > prev_ts:
                print '[INFO] Local time of "%s" is same as the database has' % i
                continue

            db_add_ts( db, "local_" + i, curr_ts)

            remote_ts = get_remote_time(DEFAULT_URL + i)
            print '[INFO] inserting remote_%s: %s' % (i,remote_ts)
            db_add_ts( db, "remote_" + i, remote_ts)

            if prev_ts == None or (remote_ts - prev_ts) == timedelta(hours=7):
                print '[INFO] local and remote ChangeLog times match'
                if rss_ts == None:
                    print '[INFO] RSS file (%s) does not exist' % (rss_ts)
                print '[INFO] Processing "%s"' % rss_file_name
                process_changelog_rss(os.path.join(SLACKWARE_DIR_PATH, i))
            elif prev_ts == None or rss_ts < prev_ts:
                print '[INFO] RSS file (%s) is older than the ChangeLog (%s)' % (rss_ts, prev_ts)
                print '[INFO] Processing "%s"' % rss_file_name
                process_changelog_rss(os.path.join(SLACKWARE_DIR_PATH, i))
            else:
                print '[INFO] RSS seems current'
    finally:
        try:
            os.wait()
        except:
            pass
        db_teardown(db)

if __name__ == "__main__": sys.exit(main(sys.argv[1:]))

@@ -1,39 +0,0 @@
#!/usr/bin/env python

import os
import sys
import glob
import time

sys.path.insert(0, "/home/vbatts/opt/lib/python2.5/site-packages")
sys.path.insert(0, "/home/vbatts/opt/lib/python2.5")
import pyinotify

dir_path = "/mirrors/ftp.slackware.com/pub/slackware"

def process_changelog_rss(event):
    if os.path.basename(event.pathname) == "ChangeLog.txt":
        print "%f: proccessing %s" % (time.time(), event)
        os.system("/home/vbatts/opt/bin/ruby /home/vbatts/bin/gen_changlog_rss.rb %s" % event.pathname)

def main(args):
    wm = pyinotify.WatchManager()

    notifier = pyinotify.Notifier(wm)

    for dir in glob.glob(dir_path + "/*/"):
        if os.path.exists(dir + "ChangeLog.txt"):
            print "%f: Adding watch for %s" % (time.time(), dir)
            wm.add_watch(dir, pyinotify.IN_MOVED_TO, rec=False, proc_fun=process_changelog_rss)

    for dir in glob.glob(dir_path + "/*/patches/"):
        print "%f: Adding watch for %s" % (time.time(), dir)
        wm.add_watch(dir, pyinotify.IN_MOVED_TO, rec=False, proc_fun=process_changelog_rss)

    #wm.add_watch("/home/vbatts/", pyinotify.IN_MOVED_TO, rec=False, proc_fun=process_changelog_rss)

    notifier.loop()


if __name__ == "__main__": main(sys.argv[1:])

@@ -1,155 +0,0 @@
#!/usr/bin/env ruby
# Sun Jan 23 11:30:53 PST 2011
# Created by vbatts, vbatts@hashbangbash.com

$PROGRAM_NAME = File.basename(__FILE__)

require 'find'

require 'rubygems'
require 'ruby-prof'
require 'slackware'
require 'slackware/changelog/rss'
require 'rb-inotify'


BASE_URL = "http://slackware.osuosl.org/"
MIRROR_BASE_DIR = "/mirrors/ftp.slackware.com/pub/slackware/"
FEEDS_BASE_DIR = "/home/vbatts/public_html/feeds/"
RE_REPO_NAME = Regexp.new(/slackware(\d{2})?-(\d+\.\d+|current)\/(patches)?\/?.*/)

def generate_new_if_none
  files = []

  Find.find(MIRROR_BASE_DIR) {|file|
    relative_name = file.sub(MIRROR_BASE_DIR, "")
    if File.basename(file) == "ChangeLog.txt"
      if not(relative_name.include?("zipslack"))
        files << relative_name
        Find.prune
      end
    end
    # putting this check *after* the one above,
    # lets us get the patches directories too
    # while still getting a bit of speed (1.5s)
    if relative_name.split("/").count > 2
      Find.prune
    end
  }
  puts "%f: watching %d changelogs" % [Time.now.to_f, files.count]
  files.each {|file|
    m = RE_REPO_NAME.match file
    if m[3].nil?
      file_name = "%sslackware%s-%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2]]
    else
      file_name = "%sslackware%s-%s_%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2], m[3]]
    end
    unless File.exist?(file_name)
      c_file = MIRROR_BASE_DIR + file
      changelog = Slackware::ChangeLog.new(c_file, :version => m[2])
      changelog.opts[:arch] = m[1] unless m[1].nil?
      if m[3].nil?
        changelog.opts[:url] = "%sslackware%s-%s/ChangeLog.txt" % [BASE_URL, m[1], m[2]]
        feed = File.open( "%sslackware%s-%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2]], "w+")
      else
        changelog.opts[:url] = "%sslackware%s-%s/%s/ChangeLog.txt" % [BASE_URL, m[1], m[2], m[3]]
        feed = File.open( "%sslackware%s-%s_%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2], m[3]], "w+")
      end
      changelog.parse
      puts "%f: Making a first feed: %s" % [Time.now.to_f, feed.path]
      feed << changelog.to_rss
      feed.close
      changelog = nil
    end
  }
end

def run_notifier
  n = INotify::Notifier.new
  dirs = Dir.glob(MIRROR_BASE_DIR + "*")
  dirs.concat(Dir.glob(MIRROR_BASE_DIR + "*/patches/"))
  dirs.each {|dir|
    next unless File.exist?(File.join(dir, "ChangeLog.txt"))
    puts "%f: working with %s" % [Time.now.to_f, dir]
    n.watch(dir, :moved_to) {|mfile|
      file_name = mfile.absolute_name
      if File.basename(file_name) == "ChangeLog.txt"
        puts "%f: looking into %s" % [Time.now.to_f, file_name]
        match_data = RE_REPO_NAME.match(file_name)

        unless match_data.nil?
          changelog = Slackware::ChangeLog.new(file_name, :version => match_data[2])
          changelog.opts[:arch] = match_data[1] unless match_data[1].nil?

          if match_data[3].nil?
            changelog.opts[:url] = "%sslackware%s-%s/ChangeLog.txt" % [
              BASE_URL,
              match_data[1],
              match_data[2]
            ]
            feed = File.open( "%sslackware%s-%s_ChangeLog.rss" % [
              FEEDS_BASE_DIR,
              match_data[1],
              match_data[2]
            ], "w+")
          else
            changelog.opts[:url] = "%sslackware%s-%s/%s/ChangeLog.txt" % [
              BASE_URL,
              match_data[1],
              match_data[2],
              match_data[3]
            ]
            feed = File.open( "%sslackware%s-%s_%s_ChangeLog.rss" % [
              FEEDS_BASE_DIR,
              match_data[1],
              match_data[2],
              match_data[3]
            ], "w+")
          end
          begin
            changelog.parse
          rescue StandardError => ex
            puts "%f: %s" % [Time.now.to_f, ex.message]
            puts "%f: %s" % [Time.now.to_f, file_name]
            next
          end

          puts "%f: parsed %s to %s" % [Time.now.to_f, file_name, feed.path]

          feed << changelog.to_rss
          feed.close
          changelog = nil
        end
      end
    }
  }
  begin
    n.run
  rescue Interrupt
  end
end

## Main

#generate_new_if_none()
begin
  RubyProf.start
  run_notifier()
ensure
  result = RubyProf.stop

  RubyProf.measure_mode = RubyProf::PROCESS_TIME
  RubyProf.measure_mode = RubyProf::WALL_TIME
  RubyProf.measure_mode = RubyProf::CPU_TIME
  #RubyProf.measure_mode = RubyProf::ALLOCATIONS
  #RubyProf.measure_mode = RubyProf::MEMORY
  #RubyProf.measure_mode = RubyProf::GC_RUNS
  #RubyProf.measure_mode = RubyProf::GC_TIME

  output_file_name = File.join(ENV["HOME"],"%s-%s%s" % [Time.now.to_i.to_s,File.basename(__FILE__),".log"])
  output_file = File.open(output_file_name, "w+")
  printer = RubyProf::FlatPrinter.new(result)
  printer.print(output_file,0)
  puts "%f: %s written" % [Time.now.to_f, output_file_name]
  output_file.close
end

@@ -1,68 +0,0 @@
#!/home/vbatts/opt/bin/ruby

require 'logger'

$log = Logger.new(STDERR)
$log.level = Logger::DEBUG

# put this in a loader function, because the
# rss library is SOO SLOW to load. we don't want to load it,
# if the script is going to fail early.
def load_libs()
  require 'rubygems'
  require 'slackware'
  require 'slackware/changelog/rss'
  require 'rb-inotify'
end


BASE_URL = "http://slackware.osuosl.org/"
FEEDS_BASE_DIR = "/home/vbatts/public_html/feeds/"
RE_REPO_NAME = Regexp.new(/slackware(\d{2})?-(\d+\.\d+|current)\/(patches)?\/?.*/)

def gen_file(file)
  m = RE_REPO_NAME.match file
  if m[3].nil?
    file_name = "%sslackware%s-%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2]]
  else
    file_name = "%sslackware%s-%s_%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2], m[3]]
  end

  if File.exist?(file_name)
    if File.mtime(file) < File.mtime(file_name)
      printf("%f: INFO: %s is newer than %s\n", Time.now, file, file_name)
    end
  end

  changelog = Slackware::ChangeLog.new(file) #, :version => m[2])
  opts = Hash.new
  opts[:arch] = m[1] unless m[1].nil?
  if m[3].nil?
    opts[:url] = "%sslackware%s-%s/ChangeLog.txt" % [BASE_URL, m[1], m[2]]
    feed = File.open( "%sslackware%s-%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2]], "w+")
  else
    opts[:url] = "%sslackware%s-%s/%s/ChangeLog.txt" % [BASE_URL, m[1], m[2], m[3]]
    feed = File.open( "%sslackware%s-%s_%s_ChangeLog.rss" % [FEEDS_BASE_DIR, m[1], m[2], m[3]], "w+")
  end
  changelog.parse
  printf("%f: INFO: generating feed: %s\n", Time.now.to_f, feed.path)
  feed << changelog.to_rss(opts)
  feed.close
  changelog = nil
end

if ARGV.count == 0
  $log.error("#{Time.now}: ERROR: ChangeLog.txt files must be passed\n")
  exit(2)
else
  load_libs()
  for file in ARGV
    if File.exist?(file)
      gen_file(file)
    else
      $log.warn("#{Time.now}: WARN: #{file} does not exist\n")
    end
  end
end

# vim: set sts=2 sw=2 et ai:

changelog/feeds.go (new file, 51 lines added)

@@ -0,0 +1,51 @@
package changelog

import (
	"fmt"
	"time"

	"github.com/gorilla/feeds"
)

// ToFeed produces a github.com/gorilla/feeds.Feed that can be written to Atom or Rss
func ToFeed(link string, entries []Entry) (*feeds.Feed, error) {
	var newestEntryTime time.Time
	var oldestEntryTime time.Time

	for _, e := range entries {
		if e.Date.After(newestEntryTime) {
			newestEntryTime = e.Date
		}
		if e.Date.Before(oldestEntryTime) {
			oldestEntryTime = e.Date
		}
	}

	feed := &feeds.Feed{
		Title:       "",
		Link:        &feeds.Link{Href: link},
		Description: "Generated ChangeLog.txt feeds by sl-feeds (github.com/vbatts/sl-feeds)",
		Created:     oldestEntryTime,
		Updated:     newestEntryTime,
	}
	feed.Items = make([]*feeds.Item, len(entries))
	for i, e := range entries {
		feed.Items[i] = &feeds.Item{
			Created:     e.Date,
			Link:        &feeds.Link{Href: ""},
			Description: e.ToChangeLog(),
		}

		updateWord := "updates"
		if len(e.Updates) == 1 {
			updateWord = "update"
		}
		if e.SecurityFix() {
			feed.Items[i].Title = fmt.Sprintf("%d %s. Including a %s!", len(e.Updates), updateWord, securityFixStr)
		} else {
			feed.Items[i].Title = fmt.Sprintf("%d %s.", len(e.Updates), updateWord)
		}
	}

	return feed, nil
}

changelog/feeds_test.go (new file, 38 lines added)

@@ -0,0 +1,38 @@
package changelog

import (
	"io/ioutil"
	"os"
	"testing"
)

func TestFeed(t *testing.T) {
	fh, err := os.Open("testdata/ChangeLog.txt")
	if err != nil {
		t.Fatal(err)
	}
	defer fh.Close()

	e, err := Parse(fh)
	if err != nil {
		t.Fatal(err)
	}

	f, err := ToFeed("http://slackware.osuosl.org/slackware64-current/ChangeLog.txt", e)
	if err != nil {
		t.Fatal(err)
	}

	rss, err := f.ToRss()
	if err != nil {
		t.Fatal(err)
	}
	//println(rss)
	if len(rss) == 0 {
		t.Error("rss output is empty")
	}

	if err := f.WriteRss(ioutil.Discard); err != nil {
		t.Error(err)
	}
}

changelog/parse.go (new file, 124 lines added)

@@ -0,0 +1,124 @@
package changelog

import (
	"bufio"
	"fmt"
	"io"
	"regexp"
	"strings"
	"time"
)

const (
	dividerStr     = `+--------------------------+`
	securityFixStr = `(* Security fix *)`
	dayPat         = `^(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s.*\d{4}$`
	updatePat      = `^([a-z].*/.*): (Added|Rebuilt|Removed|Updated|Upgraded)\.$`
)

var (
	dayReg    = regexp.MustCompile(dayPat)
	updateReg = regexp.MustCompile(updatePat)
)

// Parse takes in a slackware ChangeLog.txt and returns its collections of Entries
func Parse(r io.Reader) ([]Entry, error) {
	buf := bufio.NewReader(r)
	entries := []Entry{}
	curEntry := Entry{}
	var curUpdate *Update
	for {
		line, err := buf.ReadString('\n')
		if err != nil && err != io.EOF {
			return nil, err
		}
		isEOF := err == io.EOF
		trimmedline := strings.TrimSuffix(line, "\n")

		if trimmedline == dividerStr {
			if curUpdate != nil {
				curEntry.Updates = append(curEntry.Updates, *curUpdate)
				curUpdate = nil
			}
			entries = append(entries, curEntry)
			if isEOF {
				break
			}
			curEntry = Entry{}
		} else if dayReg.MatchString(trimmedline) {
			// this date means it is the beginning of an entry
			t, err := time.Parse(time.UnixDate, trimmedline)
			if err != nil {
				return nil, err
			}
			curEntry.Date = t
		} else if updateReg.MatchString(trimmedline) {
			// match on whether this is an update line
			if curUpdate != nil {
				curEntry.Updates = append(curEntry.Updates, *curUpdate)
				curUpdate = nil
			}
			m := updateReg.FindStringSubmatch(trimmedline)
			curUpdate = &Update{
				Name:   m[1],
				Action: m[2],
			}
		} else if curUpdate != nil && strings.HasPrefix(trimmedline, " ") {
			curUpdate.Comment = curUpdate.Comment + line
		} else {
			// Everything else is a comment on the Entry
			curEntry.Comment = curEntry.Comment + line
		}

		if isEOF {
			break
		}
	}
	return entries, nil
}

// Entry is a section of updates (or release comments) in a ChangeLog.txt
type Entry struct {
	Date    time.Time
	Comment string
	Updates []Update
}

// SecurityFix is whether an update in this ChangeLog Entry includes a SecurityFix
func (e Entry) SecurityFix() bool {
	for _, u := range e.Updates {
		if u.SecurityFix() {
			return true
		}
	}
	return false
}

// ToChangeLog reformats the struct as the text for ChangeLog.txt output
func (e Entry) ToChangeLog() string {
	str := e.Date.Format(time.UnixDate) + "\n"
	if strings.Trim(e.Comment, " \n") != "" {
		str = str + e.Comment
	}
	for _, u := range e.Updates {
		str = str + u.ToChangeLog()
	}
	return str
}

// Update is a package or component that is updated in a ChangeLog Entry
type Update struct {
	Name    string
	Action  string
	Comment string
}

// SecurityFix that this update is a security fix (that the comment includes `(* Security fix *)`)
func (u Update) SecurityFix() bool {
	return strings.Contains(u.Comment, securityFixStr)
}

// ToChangeLog reformats the struct as the text for ChangeLog.txt output
func (u Update) ToChangeLog() string {
	return fmt.Sprintf("%s: %s.\n%s", u.Name, u.Action, u.Comment)
}

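To make the parser's expectations concrete, here is a hedged sketch (not part of this commit) that builds a single entry inline and runs it through Parse: a date line in UnixDate form matching dayPat, free text that becomes the Entry comment, `name: Action.` lines matching updatePat, indented lines that attach to the preceding Update, the `(* Security fix *)` marker, and the divider that closes the entry. The package names and date below are invented for illustration.

```go
package changelog

import (
	"strings"
	"testing"
)

// TestParseSketch is a hypothetical example, not part of this commit: it shows
// the entry shape that Parse recognizes, reusing the package's own constants
// for the divider and security-fix marker so the sample stays in sync.
func TestParseSketch(t *testing.T) {
	sample := "Mon Jan  4 20:15:27 UTC 2016\n" + // matches dayPat, parsed with time.UnixDate
		"Happy new year!\n" + // un-indented text becomes the Entry's Comment
		"a/pkgtools-14.2-noarch-1.txz: Rebuilt.\n" + // matches updatePat
		"n/openssl-1.0.2e-x86_64-1.txz: Upgraded.\n" +
		"  This update fixes a protocol downgrade issue.\n" + // indented lines join the Update's Comment
		"  " + securityFixStr + "\n" + // marks the Update (and so the Entry) as a security fix
		dividerStr + "\n" // the divider closes out the Entry

	entries, err := Parse(strings.NewReader(sample))
	if err != nil {
		t.Fatal(err)
	}
	if len(entries) != 1 || len(entries[0].Updates) != 2 || !entries[0].SecurityFix() {
		t.Fatalf("unexpected parse result: %+v", entries)
	}
}
```
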
changelog/parse_test.go (new file, 61 lines added)

@@ -0,0 +1,61 @@
package changelog

import (
	"os"
	"strings"
	"testing"
)

func TestParse(t *testing.T) {
	fh, err := os.Open("testdata/ChangeLog.txt")
	if err != nil {
		t.Fatal(err)
	}
	defer fh.Close()

	e, err := Parse(fh)
	if err != nil {
		t.Fatal(err)
	}

	// Make sure we got all the entries
	expectedLen := 52
	if len(e) != expectedLen {
		t.Errorf("expected %d entries; got %d", expectedLen, len(e))
	}

	// Make sure we got as many security fix entries as expected
	expectedSec := 34
	secCount := 0
	for i := range e {
		if e[i].SecurityFix() {
			secCount++
		}
	}
	if secCount != expectedSec {
		t.Errorf("expected %d security fix entries; got %d", expectedSec, secCount)
	}

	// Make sure we got as many individual updates as expected
	expectedUp := 597
	upCount := 0
	for i := range e {
		upCount += len(e[i].Updates)
	}
	if upCount != expectedUp {
		t.Errorf("expected %d updates across the entries; got %d", expectedUp, upCount)
	}

	// Make sure the top comment of an entry is working
	foundWorkmanComment := false
	expectedComment := "Thanks to Robby Workman for most of these updates."
	for i := range e {
		foundWorkmanComment = strings.Contains(e[i].Comment, expectedComment)
		if foundWorkmanComment {
			break
		}
	}
	if !foundWorkmanComment {
		t.Errorf("expected to find an Entry with comment %q", expectedComment)
	}
}

changelog/testdata/ChangeLog.txt (new file, vendored, 1430 lines added)

File diff suppressed because it is too large

crontab (2 lines changed)

@@ -1,2 +0,0 @@
#0 2 * * * ~/opt/bin/ruby ~/bin/changelog_alphageek.rb > /dev/null || echo "$(date): failed aphageek" | mail -s "[slackagg] alphageek's changelog failed $(date +%D)" vbatts@hashbangbash.com
0 */2 * * * python ~/bin/changelog_http_poll.py >/dev/null || echo "$(date): failed to poll changelogs" | mail -s "[slackrss] changelog_http_poll failed $(date +%D)" vbatts@hashbangbash.com