mirror of https://github.com/vbatts/imgsrv.git

all gfs calls are inside dbutil now

Vincent Batts 2013-10-03 22:49:57 -04:00
parent c737976121
commit cebd3acc3c
2 changed files with 90 additions and 40 deletions
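For orientation before the two diffs below: the point of the change is that handlers hold a dbutil.Util and call its methods, rather than touching a package-level *mgo.GridFS. A minimal sketch of that call pattern in Go follows; it is not part of the commit, and the dbutil import path, database name, and mongod address are assumptions made for illustration.

package main

import (
    "log"

    "github.com/vbatts/imgsrv/dbutil" // import path assumed
    "labix.org/v2/mgo"
)

func main() {
    // assumed: a local mongod and a database named "filesrv"
    session, err := mgo.Dial("localhost")
    if err != nil {
        log.Fatal(err)
    }
    defer session.Close()

    // wire the GridFS handle into the wrapper once, as initMongo() does in the diff below
    du := dbutil.Util{Gfs: session.DB("filesrv").GridFS("fs")}

    // every later access goes through du, e.g. the newest 25 files
    files, err := du.GetFiles(25)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("collected %d files", len(files))
}

With the handle wrapped once at startup, the lower-casing and query conventions live in one place instead of being repeated at every call site.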

View file

@@ -5,15 +5,32 @@ import (
     "github.com/vbatts/imgsrv/types"
     "labix.org/v2/mgo"
     "labix.org/v2/mgo/bson"
     "strings"
 )
 
 type Util struct {
     Gfs *mgo.GridFS
 }
 
+/*
+pass through for GridFs
+*/
+func (u Util) Open(filename string) (file *mgo.GridFile, err error) {
+    return u.Gfs.Open(strings.ToLower(filename))
+}
+
+/*
+pass through for GridFs
+*/
 func (u Util) Create(filename string) (file *mgo.GridFile, err error) {
     return u.Gfs.Create(strings.ToLower(filename))
+}
+
+/*
+pass through for GridFs
+*/
+func (u Util) Remove(filename string) (err error) {
+    return u.Gfs.Remove(strings.ToLower(filename))
 }
 
 /*
@@ -21,27 +38,60 @@ Find files by their MD5 checksum
 */
 func (u Util) FindFilesByMd5(md5 string) (files []types.File, err error) {
     err = u.Gfs.Find(bson.M{"md5": md5}).Sort("-metadata.timestamp").All(&files)
     return files, err
 }
 
 /*
 match for file name
 */
 func (u Util) FindFilesByName(filename string) (files []types.File, err error) {
     err = u.Gfs.Find(bson.M{"filename": filename}).Sort("-metadata.timestamp").All(&files)
     return files, err
 }
 
 /*
 Case-insensitive pattern match for file name
 */
 func (u Util) FindFilesByPatt(filename_pat string) (files []types.File, err error) {
-    err = u.Gfs.Find(bson.M{"filename": bson.M{ "$regex": filename_pat, "$options": "i"}}).Sort("-metadata.timestamp").All(&files)
+    err = u.Gfs.Find(bson.M{"filename": bson.M{"$regex": filename_pat, "$options": "i"}}).Sort("-metadata.timestamp").All(&files)
     return files, err
 }
+
+/*
+Case-insensitive match on a keyword
+*/
+func (u Util) FindFilesByKeyword(keyword string) (files []types.File, err error) {
+    err = u.Gfs.Find(bson.M{"metadata.keywords": strings.ToLower(keyword)}).Sort("-metadata.timestamp").All(&files)
+    return files, err
+}
+
+/*
+Get all the files.
+pass -1 for all files
+*/
+func (u Util) GetFiles(limit int) (files []types.File, err error) {
+    if limit == -1 {
+        err = u.Gfs.Find(nil).Sort("-metadata.timestamp").All(&files)
+    } else {
+        err = u.Gfs.Find(nil).Sort("-metadata.timestamp").Limit(limit).All(&files)
+    }
+    return files, err
+}
+
+/*
+Count the filename matches
+*/
+func (u Util) CountFiles(filename string) (count int, err error) {
+    query := u.Gfs.Find(bson.M{"filename": strings.ToLower(filename)})
+    return query.Count()
+}
+
+/*
+Get one file back, by searching by file name
+*/
 func (u Util) GetFileByFilename(filename string) (this_file types.File, err error) {
-    err = u.Gfs.Find(bson.M{"filename": filename}).One(&this_file)
+    err = u.Gfs.Find(bson.M{"filename": strings.ToLower(filename)}).One(&this_file)
     if err != nil {
         return this_file, err
     }
@@ -63,9 +113,11 @@ func (u Util) GetFileRandom() (this_file types.File, err error) {
     return this_file, nil
 }
 
-/* Check whether this types.File filename is on Mongo */
+/*
+Check whether this types.File filename is on Mongo
+*/
 func (u Util) HasFileByFilename(filename string) (exists bool, err error) {
-    c, err := u.Gfs.Find(bson.M{"filename": filename}).Count()
+    c, err := u.CountFiles(filename)
     if err != nil {
         return false, err
     }
@@ -83,7 +135,7 @@ func (u Util) HasFileByMd5(md5 string) (exists bool, err error) {
 }
 
 func (u Util) HasFileByKeyword(keyword string) (exists bool, err error) {
-    c, err := u.Gfs.Find(bson.M{"metadata": bson.M{"keywords": keyword}}).Count()
+    c, err := u.Gfs.Find(bson.M{"metadata": bson.M{"keywords": strings.ToLower(keyword)}}).Count()
     if err != nil {
         return false, err
     }
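One behavior worth noting before the server-side diff: each pass-through above lower-cases the filename (Open, Create, Remove, CountFiles, GetFileByFilename), so filename lookups become case-insensitive for callers. A small sketch of that contract; the helper below is illustrative only and assumes the du value wired up in the earlier example.

// roundTrip is illustrative only; it assumes the du wiring from the earlier sketch.
func roundTrip(du dbutil.Util) (bool, error) {
    // stored as "photo.jpg": Create lower-cases the name
    gf, err := du.Create("Photo.JPG")
    if err != nil {
        return false, err
    }
    if _, err := gf.Write([]byte("not really a jpeg")); err != nil {
        gf.Close()
        return false, err
    }
    if err := gf.Close(); err != nil {
        return false, err
    }

    // a differently-cased lookup still matches, via CountFiles -> strings.ToLower
    return du.HasFileByFilename("PHOTO.jpg")
}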

View file

@@ -19,7 +19,6 @@ import (
     "github.com/vbatts/imgsrv/types"
     "github.com/vbatts/imgsrv/util"
     "labix.org/v2/mgo"
-    "labix.org/v2/mgo/bson"
 )
 
 var (
@@ -28,7 +27,6 @@ var (
     mongo_session *mgo.Session // FIXME make this not global
     images_db *mgo.Database // FIXME make this not global
-    gfs *mgo.GridFS // FIXME make this not global
 
     du dbutil.Util
 )
@@ -73,8 +71,7 @@ func initMongo() {
             log.Panic(err)
         }
     }
-    gfs = images_db.GridFS("fs")
-    du.Gfs = gfs
+    du.Gfs = images_db.GridFS("fs")
 }
 
 func serverErr(w http.ResponseWriter, r *http.Request, e error) {
@@ -118,8 +115,7 @@ func routeViewsGET(w http.ResponseWriter, r *http.Request) {
     w.Header().Set("Content-Type", "text/html")
     if len(uriChunks) == 2 && len(uriChunks[1]) > 0 {
-        var file types.File
-        err := gfs.Find(bson.M{"filename": strings.ToLower(uriChunks[1])}).One(&file)
+        file, err := du.GetFileByFilename(uriChunks[1])
         if err != nil {
             serverErr(w, r, err)
             return
@@ -176,9 +172,7 @@ func routeFilesGET(w http.ResponseWriter, r *http.Request) {
     if len(uriChunks) == 2 && len(filename) > 0 {
         log.Printf("Searching for [%s] ...", filename)
-        query := gfs.Find(bson.M{"filename": filename})
-
-        c, err := query.Count()
+        c, err := du.CountFiles(filename)
         // preliminary checks, if they've passed an image name
         if err != nil {
             serverErr(w, r, err)
@@ -196,7 +190,7 @@ func routeFilesGET(w http.ResponseWriter, r *http.Request) {
         w.Header().Set("Cache-Control", "max-age=315360000")
         w.WriteHeader(http.StatusOK)
 
-        file, err := gfs.Open(filename)
+        file, err := du.Open(filename)
         if err != nil {
             serverErr(w, r, err)
             return
@@ -254,9 +248,11 @@ func routeFilesPOST(w http.ResponseWriter, r *http.Request) {
         v := r.FormValue(word)
         if len(v) > 0 {
             if strings.Contains(v, ",") {
-                info.Keywords = append(info.Keywords, strings.Split(v, ",")...)
+                for _, word := range strings.Split(v, ",") {
+                    info.Keywords = append(info.Keywords, strings.Trim(word, " "))
+                }
             } else {
-                info.Keywords = append(info.Keywords, v)
+                info.Keywords = append(info.Keywords, strings.Trim(v, " "))
             }
         }
     }
@@ -272,7 +268,7 @@ func routeFilesPOST(w http.ResponseWriter, r *http.Request) {
     exists, err := du.HasFileByFilename(filename)
     if err == nil && !exists {
-        file, err := gfs.Create(filename)
+        file, err := du.Create(filename)
         defer file.Close()
         if err != nil {
             serverErr(w, r, err)
@@ -297,7 +293,7 @@ func routeFilesPOST(w http.ResponseWriter, r *http.Request) {
     } else if exists {
         if r.Method == "PUT" {
             // TODO nothing will get here presently. Workflow needs more review
-            file, err := gfs.Open(filename)
+            file, err := du.Open(filename)
             defer file.Close()
             if err != nil {
                 serverErr(w, r, err)
@@ -351,7 +347,7 @@ func routeFilesDELETE(w http.ResponseWriter, r *http.Request) {
     }
 
     if exists {
-        err = gfs.Remove(uriChunks[1])
+        err = du.Remove(uriChunks[1])
         if err != nil {
             serverErr(w, r, err)
             return
@@ -402,9 +398,8 @@ func routeRoot(w http.ResponseWriter, r *http.Request) {
     // Show a page of most recent images, and tags, and uploaders ...
     w.Header().Set("Content-Type", "text/html")
 
-    //iter := gfs.Find(bson.M{"uploadDate": bson.M{"$gt": time.Now().Add(-time.Hour)}}).Limit(defaultPageLimit).Iter()
     var files []types.File
-    err := gfs.Find(nil).Sort("-metadata.timestamp").Limit(defaultPageLimit).All(&files)
+    files, err := du.GetFiles(defaultPageLimit)
     if err != nil {
         serverErr(w, r, err)
         return
@@ -427,7 +422,7 @@ func routeAll(w http.ResponseWriter, r *http.Request) {
     // Show a page of all the images
 
     var files []types.File
-    err := gfs.Find(nil).Sort("-metadata.timestamp").All(&files)
+    files, err := du.GetFiles(-1)
     if err != nil {
         serverErr(w, r, err)
         return
@@ -473,25 +468,28 @@ func routeKeywords(w http.ResponseWriter, r *http.Request) {
     log.Printf("K: %s (%d)", uriChunks, len(uriChunks))
 
-    var iter *mgo.Iter
     if uriChunks[len(uriChunks)-1] == "r" {
         // Path: /k/
         // TODO determine how to show a random image by keyword ...
         log.Println("random isn't built yet")
         httplog.LogRequest(r, 404)
         return
-    } else if len(uriChunks) == 2 {
-        // Path: /k/:name
-        log.Println(uriChunks[1])
-        iter = gfs.Find(bson.M{"metadata.keywords": uriChunks[1]}).Sort("-metadata.timestamp").Limit(defaultPageLimit).Iter()
     }
 
-    files := []types.File{}
-    err := iter.All(&files)
-    if err != nil {
-        serverErr(w, r, err)
-        return
+    var (
+        files []types.File
+        err   error
+    )
+    if len(uriChunks) == 2 {
+        // Path: /k/:name
+        log.Println(uriChunks[1])
+        files, err = du.FindFilesByKeyword(uriChunks[1])
+        if err != nil {
+            serverErr(w, r, err)
+            return
+        }
     }
 
     log.Printf("collected %d files", len(files))
     err = ListFilesPage(w, files)
     if err != nil {
@@ -751,7 +749,7 @@ func routeUpload(w http.ResponseWriter, r *http.Request) {
         filename = strings.ToLower(fmt.Sprintf("%s%s", str, ext))
     }
 
-    file, err := gfs.Create(filename)
+    file, err := du.Create(filename)
     defer file.Close()
     if err != nil {
         log.Printf("Failed to create on gfs: %s", err)