/daisy.go
- package main
- /*
- 2016 by Jörg Ramb
- */
- import (
- "bufio"
- "bytes"
- "crypto/sha256"
- "encoding/base64"
- "encoding/gob"
- "encoding/hex"
- "encoding/json"
- "flag"
- "fmt"
- "io"
- "io/ioutil"
- "log"
- "net"
- "net/http"
- "net/url"
- "os"
- "os/signal"
- "path"
- "regexp"
- "runtime"
- "strconv"
- "strings"
- "time"
- "goji.io"
- "goji.io/pat"
- "golang.org/x/net/context"
- "github.com/spf13/viper"
- // "github.com/boltdb/bolt"
- "github.com/coreos/bbolt"
- // go get github.com/mattn/go-sqlite3
- //_ "github.com/mattn/go-sqlite3"
- // "github.com/satori/go.uuid"
- "github.com/josselin-c/go.uuid"
- // "github.com/google/uuid"
- //"github.com/pborman/uuid"
- //"github.com/nu7hatch/gouuid"
- )
- //var isoDateTimeFormat = time.RFC3339
- const isoDateTimeFormat = "2006-01-02 15:04:05"
- const contentTypeJSON = "application/json"
- const daisyVersion = "2018.07.15.2"
- //const storePath = "store" + string(os.PathSeparator)
- var storePath string //= viper.GetString("store")
- var logsPath string //= viper.GetString("logs")
- const pathMode = 0700
- // ba7eaa41-9321-4895-a0e9-fb365612163e
- var uuidRegexp = regexp.MustCompile(`^[[:xdigit:]]{8}-?[[:xdigit:]]{4}-?[[:xdigit:]]{4}-?[[:xdigit:]]{4}-?[[:xdigit:]]{12}$`)
- var sourcePathRegexp = regexp.MustCompile("^/source(?:/([^/]+)(?:/(.*))?)?")
- //var filenameRegexp = regexp.MustCompile(`^[\w\-\. ]+$`) // \w = [0-9a-zA-Z_].
- //var sha256re = regexp.MustCompile(`[0-9a-f]{64}`)
- const (
- bucketAccesses = "ACCESSES"
- bucketSources = "SOURCES"
- bucketLinkSA = "SOURCE-ACCESSES-LINK"
- bucketLinkHS = "HASH-SOURCE-LINK"
- )
- // Counters is a snapshot of the profiling counters at a specific point in time
- type Counters struct {
- LastUpdate time.Time
- TimeDiff string
- Sources int
- Accesses int
- Fetches int
- Errors int
- }
- // DaisyDB bundles the bolt database handle with runtime statistics for a single Daisy instance
- type DaisyDB struct {
- DB *bolt.DB `json:"-"`
- dbNeedsBackup bool
- ServerStartTime time.Time
- Uptime string
- DBStats bolt.Stats
- DBStatsDiff bolt.Stats
- previousCounters Counters
- CurrentCounters Counters
- CountersDiff Counters
- }
- var gDB *DaisyDB
- var (
- requestLog *os.File
- errorLog *os.File
- urlPathPrefix string
- uuidStyle string
- setupAPIKey string
- )
- // Sub calculates a difference between two Counters
- func (from Counters) Sub(sub Counters) Counters {
- diff := from
- if !sub.LastUpdate.IsZero() {
- diff.TimeDiff = from.LastUpdate.Sub(sub.LastUpdate).String()
- }
- diff.Sources = from.Sources - sub.Sources
- diff.Accesses = from.Accesses - sub.Accesses
- diff.Fetches = from.Fetches - sub.Fetches
- diff.Errors = from.Errors - sub.Errors
- return diff
- }
- func enc(b []byte) string {
- return hex.EncodeToString(b)
- }
- func encMaybe(b *[]byte) *string {
- if b != nil {
- s := enc(*b)
- return &s
- }
- return nil
- }
- func dec(s string) []byte {
- b, _ := hex.DecodeString(strings.Trim(s, " "))
- return b
- }
- func decMaybe(s *string) *[]byte {
- if s != nil {
- b := dec(*s)
- return &b
- }
- return nil
- }
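- // uuidEnc renders a UUID either as its canonical 36-character string (uuid-style
- // "full") or as a 22-character un-padded URL-safe base64 string (the default);
- // uuidDec below accepts both forms.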
- func uuidEnc(u uuid.UUID) string {
- if uuidStyle == "full" {
- return u.String()
- }
- return strings.Trim(base64.URLEncoding.EncodeToString(u.Bytes()), "=")
- }
- func uuidDec(s string) (u uuid.UUID) {
- var err error
- if uuidRegexp.MatchString(s) {
- u, err = uuid.FromString(s)
- } else {
- var ub []byte
- ub, err = base64.URLEncoding.WithPadding(base64.NoPadding).DecodeString(s)
- u = uuid.FromBytesOrNil(ub)
- }
- if err != nil {
- panic(err)
- }
- return u
- }
- // errCheck logs msg together with err when err is non-nil (helper, currently unused)
- func errCheck(err error, msg string) {
- if err != nil {
- log.Println(msg, err)
- }
- }
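- // constructPath returns the on-disk location of a stored blob: binaries live under
- // storePath/<first hash byte as hex>/<full sha256 as hex>, creating the
- // sub-directory on demand.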
- func constructPath(sum []byte) string {
- p := path.Join(storePath, enc(sum[:1]))
- os.MkdirAll(p, pathMode)
- return path.Join(p, enc(sum))
- }
- func constructLogPath(sourceID uuid.UUID) string {
- sourceIDstr := strings.ToUpper(enc(sourceID[:]))
- sourceLogPath := path.Join(logsPath, "sourcelogs", sourceIDstr[:2])
- os.MkdirAll(sourceLogPath, pathMode)
- return path.Join(sourceLogPath, sourceIDstr+".log")
- }
- // JSONAHandler is a JSON representation of an access handler
- type JSONAHandler struct {
- AccessID string `json:"access"`
- SourceID string `json:"-"`
- URLPath string `json:"url-path"`
- FetchCount int `json:"fetch-count"`
- StartDate *string `json:"start-date,omitempty"`
- EndDate *string `json:"end-date,omitempty"`
- }
- // JSONSource is the JSON representation of a source
- type JSONSource struct {
- SourceID string `json:"source"`
- Binary bool `json:"binary"`
- MimeType *string `json:"mime-type,omitempty"`
- FileName *string `json:"file-name,omitempty"`
- URL *string `json:"url,omitempty"`
- StartDate *string `json:"start-date,omitempty"`
- EndDate *string `json:"end-date,omitempty"`
- APIKey *string `json:"apikey,omitempty"`
- Sha256 *string `json:"sha256,omitempty"`
- NumHandlers int `json:"num-handlers"`
- AccessHandlers []JSONAHandler `json:"access-handlers"`
- }
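- // For illustration, a create request (POST .../source/) might carry a body like
- // {"apikey":"secret","file-name":"report.pdf","mime-type":"application/pdf",
- //  "num-handlers":2,"end-date":"2018-12-31"} -- either url or file-name plus
- // mime-type is required, the remaining fields are optional.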
- // DBAHandler is the internal DB representation of an access handler
- type DBAHandler struct {
- AccessID uuid.UUID // this is the key anyway
- SourceID uuid.UUID `json:"source"`
- FetchCount int `json:"fetch-count"`
- StartDate *time.Time `json:"start-time,omitempty"`
- EndDate *time.Time `json:"end-time,omitempty"`
- }
- // DBSource is the internal DB representation of a source
- type DBSource struct {
- SourceID uuid.UUID
- Binary bool `json:"binary"`
- MimeType *string `json:"mime-type"`
- FileName *string `json:"file-name,omitempty"`
- URL *string `json:"url,omitempty"`
- StartDate *time.Time `json:"start-time,omitempty"`
- EndDate *time.Time `json:"end-time,omitempty"`
- Sha256 *[]byte `json:"sha256,omitempty"`
- NumHandlers int `json:"num-handlers"`
- AccessHandlers []DBAHandler `json:"access-handlers"`
- APIKey *string `json:"apikey"`
- }
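- // parseDate accepts "2006-01-02 15:04:05", a plain "2006-01-02" date, or an
- // RFC3339(Nano) timestamp (local time zone is assumed when none is given);
- // anything else panics.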
- func parseDate(s *string) *time.Time {
- if s == nil || *s == "" {
- return nil
- }
- sdate, err := time.ParseInLocation(isoDateTimeFormat, *s, time.Local)
- if err != nil {
- sdate, err = time.ParseInLocation("2006-01-02", *s, time.Local)
- if err != nil {
- sdate, err = time.ParseInLocation(time.RFC3339Nano, *s, time.Local)
- if err != nil {
- sdate, err = time.ParseInLocation(time.RFC3339, *s, time.Local)
- }
- }
- }
- if err != nil {
- panic(err)
- }
- return &sdate
- }
- func renderDate(d *time.Time) *string {
- if d == nil {
- return nil
- }
- s := d.Format(isoDateTimeFormat)
- return &s
- }
- func renderDateOrEmpty(d *time.Time) string {
- strPtr := renderDate(d)
- if strPtr == nil {
- return ""
- }
- return *strPtr
- }
- func renderStringOrEmpty(s *string) string {
- if s == nil {
- return ""
- }
- return *s
- }
- // DBStruct converts a JSON representation to a DB representation
- func (s *JSONSource) DBStruct() *DBSource {
- if s == nil {
- return nil
- }
- sdate := parseDate(s.StartDate)
- edate := parseDate(s.EndDate)
- allHandlers := make([]DBAHandler, 0, len(s.AccessHandlers))
- for _, ah := range s.AccessHandlers {
- dbah := ah.DBStruct()
- if dbah != nil {
- allHandlers = append(allHandlers, *dbah)
- }
- }
- return &DBSource{
- Binary: s.Binary,
- MimeType: s.MimeType,
- FileName: s.FileName,
- URL: s.URL,
- StartDate: sdate,
- EndDate: edate,
- Sha256: decMaybe(s.Sha256),
- NumHandlers: s.NumHandlers,
- AccessHandlers: allHandlers,
- }
- }
- // JSONStruct converts a DB representation to a JSON representation
- func (a *DBAHandler) JSONStruct() *JSONAHandler {
- if a == nil {
- return nil
- }
- return &JSONAHandler{
- AccessID: uuidEnc(a.AccessID),
- SourceID: uuidEnc(a.SourceID),
- URLPath: urlPathPrefix + uuidEnc(a.AccessID),
- FetchCount: a.FetchCount,
- StartDate: renderDate(a.StartDate),
- EndDate: renderDate(a.EndDate),
- }
- }
- func listAccessHandlersToJSON(ahl []DBAHandler) []JSONAHandler {
- allHandlers := make([]JSONAHandler, 0, 10)
- for _, ah := range ahl {
- jah := ah.JSONStruct()
- if jah != nil {
- allHandlers = append(allHandlers, *jah)
- }
- }
- return allHandlers
- }
- // JSONStruct converts a DB representation to a JSON representation
- func (s *DBSource) JSONStruct() *JSONSource {
- if s == nil {
- return nil
- }
- return &JSONSource{
- SourceID: uuidEnc(s.SourceID), // rendered according to uuid-style
- Binary: s.Binary,
- MimeType: s.MimeType,
- FileName: s.FileName,
- URL: s.URL,
- StartDate: renderDate(s.StartDate),
- EndDate: renderDate(s.EndDate),
- Sha256: encMaybe(s.Sha256),
- NumHandlers: s.NumHandlers,
- AccessHandlers: listAccessHandlersToJSON(s.AccessHandlers),
- }
- }
- // DBStruct converts a JSON representation to a DB representation
- func (a *JSONAHandler) DBStruct() *DBAHandler {
- if a == nil {
- return nil
- }
- sdate := parseDate(a.StartDate)
- edate := parseDate(a.EndDate)
- return &DBAHandler{
- AccessID: uuidDec(a.AccessID),
- SourceID: uuidDec(a.SourceID),
- FetchCount: a.FetchCount,
- StartDate: sdate,
- EndDate: edate,
- }
- }
- func initDB(db *bolt.DB) error {
- log.Println("Initializing database", db)
- err := db.Update(func(tx *bolt.Tx) error {
- bucketNames := []string{
- bucketSources,
- bucketAccesses,
- bucketLinkSA,
- bucketLinkHS,
- }
- for _, bn := range bucketNames {
- if _, err := tx.CreateBucketIfNotExists([]byte(bn)); err != nil {
- return err
- }
- }
- return nil
- })
- return err
- }
- func databaseDump(db *DaisyDB, filename string) error {
- if filename == "-" {
- return nil
- }
- return db.DB.View(func(tx *bolt.Tx) error {
- src := tx.Bucket([]byte(bucketSources))
- var dumpFileName string
- if filename == "" {
- dumpFileName = "db_dump_" + time.Now().Format("2006-01-02_15-04-05") + ".tsv"
- } else {
- dumpFileName = filename
- }
- dumpFile, err := os.OpenFile(path.Join(storePath, dumpFileName), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
- if err != nil {
- return err
- }
- defer dumpFile.Close()
- src.ForEach(func(k, v []byte) error {
- s, _ := decodeDBSource(v)
- fmt.Fprintln(dumpFile, strings.Join([]string{
- "S",
- uuidEnc(s.SourceID),
- renderDateOrEmpty(s.StartDate),
- renderDateOrEmpty(s.EndDate),
- renderStringOrEmpty(encMaybe(s.Sha256)),
- renderStringOrEmpty(s.MimeType),
- renderStringOrEmpty(s.FileName),
- renderStringOrEmpty(s.URL),
- }, "\t"))
- allA, _ := dbGetAllAccesses(tx, s.SourceID)
- for _, a := range allA {
- //fmt.Println("%+v",a)
- fmt.Fprintln(dumpFile, strings.Join([]string{
- "A",
- uuidEnc(a.SourceID), // maybe not needed
- uuidEnc(a.AccessID),
- strconv.Itoa(a.FetchCount),
- renderDateOrEmpty(a.StartDate),
- renderDateOrEmpty(a.EndDate),
- }, "\t"))
- }
- //fmt.Fprintf(dumpFile, "S\t%s\t%s\n", uuidEnc(s.SourceID), "hej")
- return nil
- })
- log.Println("Database dumped to", dumpFile.Name())
- return nil
- })
- }
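- // backupDB first writes a TSV dump (see databaseDump), then snapshots the bolt
- // file to <dbfile>.backup, rotating the two previous generations to .1 and .2.
- // It is a no-op unless something was written since the last backup.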
- func backupDB(db *DaisyDB) string {
- if !db.dbNeedsBackup {
- return ""
- }
- err := databaseDump(db, viper.GetString("dump-file"))
- if err != nil {
- fmt.Println("Error during databaseDump", err)
- }
- // rename the old backup (paranoid? backup of backup!)
- fname := path.Join(storePath, viper.GetString("dbfile")+".backup")
- // keep three generations (TODO?: parameter controlled?)
- err = os.Rename(fname+".1", fname+".2") // ignore errors
- err = os.Rename(fname, fname+".1") // ignore errors
- f, err := os.Create(fname)
- if err != nil {
- log.Fatal(err)
- }
- defer f.Close()
- w := bufio.NewWriter(f)
- defer w.Flush()
- // write the backup
- db.dbNeedsBackup = false
- err = db.DB.View(func(tx *bolt.Tx) error {
- n, err := tx.WriteTo(w)
- log.Printf("Backup wrote %d bytes\n", n)
- return err
- })
- if err != nil {
- return err.Error()
- }
- return " backup " + fname
- }
- func decodeDBSource(b []byte) (*DBSource, error) {
- if b != nil {
- var s DBSource
- decoder := gob.NewDecoder(bytes.NewBuffer(b))
- err := decoder.Decode(&s)
- if err != nil {
- return nil, err
- }
- return &s, nil
- }
- return nil, nil
- }
- func decodeDBAHandler(b []byte) (*DBAHandler, error) {
- if b != nil {
- var a DBAHandler
- decoder := gob.NewDecoder(bytes.NewBuffer(b))
- err := decoder.Decode(&a)
- if err != nil {
- return nil, err
- }
- return &a, nil
- }
- return nil, nil
- }
- // Bytes returns a []byte of a DB object
- func (s DBSource) Bytes() []byte {
- var stream bytes.Buffer
- encoder := gob.NewEncoder(&stream)
- s.AccessHandlers = nil // do not save list of AHs!
- encoder.Encode(s)
- return stream.Bytes()
- }
- // Bytes returns a []byte of a DB object
- func (a DBAHandler) Bytes() []byte {
- var stream bytes.Buffer
- enc := gob.NewEncoder(&stream)
- enc.Encode(a)
- return stream.Bytes()
- }
- func dbUpsertSource(tx *bolt.Tx, what string, s DBSource) error {
- var err error
- b := tx.Bucket([]byte(bucketSources))
- sBytes := s.Bytes()
- if err = b.Put(s.SourceID.Bytes(), sBytes); err != nil {
- return err
- }
- gDB.dbNeedsBackup = true
- if s.Sha256 != nil {
- return dbLinkHashSource(tx, *s.Sha256, s.SourceID, s.Binary)
- }
- return nil
- }
- func dbIncrementAccess(aid uuid.UUID) error {
- // Batch coalesces concurrent increments into one transaction; Update would serialize them
- err := gDB.DB.Batch(func(tx *bolt.Tx) error {
- var err error
- b := tx.Bucket([]byte(bucketAccesses))
- ah, err := decodeDBAHandler(b.Get(aid[:]))
- if err != nil {
- return err
- }
- if ah != nil {
- ah.FetchCount = ah.FetchCount + 1
- err = b.Put(aid[:], ah.Bytes())
- gDB.dbNeedsBackup = true
- }
- return err
- })
- return err
- }
- func dbUpsertAccess(tx *bolt.Tx, what string, a DBAHandler) error {
- b := tx.Bucket([]byte(bucketAccesses))
- err := b.Put(a.AccessID.Bytes(), a.Bytes())
- gDB.dbNeedsBackup = true
- return err
- }
- func dbGetAccess(tx *bolt.Tx, aid uuid.UUID) (*DBAHandler, error) {
- b := tx.Bucket([]byte(bucketAccesses))
- return decodeDBAHandler(b.Get(aid[:]))
- }
- func dbLinkSourceAccess(tx *bolt.Tx, s uuid.UUID, a uuid.UUID) error {
- b := tx.Bucket([]byte(bucketLinkSA))
- joined := bytes.Join([][]byte{s.Bytes(), a.Bytes()}, nil)
- err := b.Put(joined, nil) // no data stored on the link
- gDB.dbNeedsBackup = true
- return err
- }
- func countPrefixes(c *bolt.Cursor, prefix []byte) int {
- cnt := 0
- for k, _ := c.Seek(prefix); bytes.HasPrefix(k, prefix); k, _ = c.Next() {
- cnt = cnt + 1
- }
- return cnt
- }
- func countNodes(b *bolt.Bucket) int {
- cnt := 0
- b.ForEach(func(k, v []byte) error {
- cnt++
- return nil
- })
- return cnt
- }
- func countObjects(db *DaisyDB) {
- db.DB.View(func(tx *bolt.Tx) error {
- db.CurrentCounters.Sources = countNodes(tx.Bucket([]byte(bucketSources)))
- db.CurrentCounters.Accesses = countNodes(tx.Bucket([]byte(bucketAccesses)))
- db.previousCounters = db.CurrentCounters
- return nil
- })
- }
- /*
- func checkExistsKey(bucket string, key []byte) bool {
- var existing bool
- err := gDB.DB.View(func(tx *bolt.Tx) error {
- b := tx.Bucket([]byte(bucket))
- result := b.Get(key)
- existing = (result != nil)
- return nil
- })
- if err != nil {
- panic(err)
- }
- return existing
- }
- */
- func dbLinkHashSource(tx *bolt.Tx, hash []byte, s uuid.UUID, addIt bool) error {
- b := tx.Bucket([]byte(bucketLinkHS))
- joined := bytes.Join([][]byte{hash, s.Bytes()}, nil)
- if addIt {
- return b.Put(joined, nil) // nil is ok
- }
- err := b.Delete(joined)
- return err //b.Delete(joined)
- }
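- // Keys in the SOURCE-ACCESSES-LINK bucket are the 16-byte source ID followed by
- // the 16-byte access ID, so a cursor seek on the source ID prefix enumerates all
- // access handlers of a source (see dbGetAllAccesses below).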
- func dbGetAllAccesses(tx *bolt.Tx, sid uuid.UUID) ([]DBAHandler, error) {
- allHandlers := make([]DBAHandler, 0, 10)
- c := tx.Bucket([]byte(bucketLinkSA)).Cursor()
- b := tx.Bucket([]byte(bucketAccesses))
- for k, _ := c.Seek(sid.Bytes()); bytes.HasPrefix(k, sid.Bytes()); k, _ = c.Next() {
- ah, err := decodeDBAHandler(b.Get(k[16:])) // aid is the second part of the key
- if err != nil {
- return nil, err
- }
- allHandlers = append(allHandlers, *ah)
- }
- return allHandlers, nil
- }
- func dbGetSource(tx *bolt.Tx, sid uuid.UUID) (*DBSource, error) {
- var s *DBSource
- b := tx.Bucket([]byte(bucketSources))
- var err error
- s, err = decodeDBSource(b.Get(sid.Bytes()))
- if err != nil {
- return nil, err
- }
- if s != nil {
- s.AccessHandlers, err = dbGetAllAccesses(tx, sid)
- }
- return s, err
- }
- func deleteFile(hash []byte) error {
- return os.Remove(constructPath(hash))
- }
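- // expireBinaries demotes every binary source whose end date lies more than
- // retention-days in the past: the source is marked non-binary (which also drops
- // its hash link) and the stored file is removed once no other source shares the
- // same SHA-256.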
- func expireBinaries(db *DaisyDB) string {
- now := time.Now().AddDate(0, 0, -viper.GetInt("retention-days"))
- deletedFiles := 0
- err := db.DB.Batch(func(tx *bolt.Tx) error {
- hs := tx.Bucket([]byte(bucketLinkHS)).Cursor()
- src := tx.Bucket([]byte(bucketSources))
- // go through all
- //for sid, source := src.First(); sid != nil ; sid, source := src.Next() {
- src.ForEach(func(k, v []byte) error {
- s, _ := decodeDBSource(v)
- //fmt.Println("checking", uuidEnc(s.SourceID))
- if s.Binary && s.EndDate != nil && s.Sha256 != nil && now.After(*s.EndDate) {
- s.Binary = false
- copies := countPrefixes(hs, *s.Sha256) // need to check before, because deletions are delayed??
- // updating the source will remove the hash-link as well
- if err := dbUpsertSource(tx, "update", *s); err != nil {
- return err
- }
- //fmt.Println("checking ", enc(*s.Sha256), copies)
- //if no HS left then delete binary
- if copies <= 1 { // is 1 if there is only (this) instance
- fmt.Println("DELETING FILE:", enc(*s.Sha256))
- deletedFiles = deletedFiles + 1
- deleteFile(*s.Sha256)
- sourceLog(s.SourceID, nil, "deleted binary")
- }
- }
- return nil
- })
- fmt.Println("Deleted files:", deletedFiles)
- return nil
- })
- if err != nil {
- return err.Error()
- }
- return ""
- }
- func newAccessHandler(tx *bolt.Tx, sid uuid.UUID) (DBAHandler, error) {
- var a DBAHandler
- a.AccessID = uuid.Must(uuid.NewV4())
- a.SourceID = sid
- a.FetchCount = 0
- //a.URLPath = "/g/" + a.AccessID
- dbUpsertAccess(tx, "insert", a) // was: go ...
- gDB.CurrentCounters.Accesses++
- dbLinkSourceAccess(tx, sid, a.AccessID)
- return a, nil
- }
- func extendAccessHandlers(tx *bolt.Tx, s *DBSource) error {
- //if s.AccessHandlers == nil {
- //newHandlers := make([]DBAHandler, 0, s.NumHandlers)
- //s.AccessHandlers = newHandlers
- //}
- for i := len(s.AccessHandlers); i < s.NumHandlers; i++ {
- a, err := newAccessHandler(tx, s.SourceID)
- if err != nil {
- return err
- }
- s.AccessHandlers = append(s.AccessHandlers, a)
- }
- return nil
- }
- func genericHandler(w http.ResponseWriter, r *http.Request) {
- fmt.Fprintf(w, "r.URL.Path=%s\n", r.URL.Path)
- }
- func getSourceOrPanic(tx *bolt.Tx, sourceID uuid.UUID) *DBSource {
- s, err := dbGetSource(tx, sourceID)
- if s == nil {
- panic(fmt.Errorf("No such source found"))
- }
- if err != nil {
- panic(err)
- }
- return s
- }
- func getAccessOrPanic(tx *bolt.Tx, accessID uuid.UUID) *DBAHandler {
- a, err := dbGetAccess(tx, accessID)
- if a == nil {
- panic(fmt.Errorf("No such access handler found: %s", uuidEnc(accessID)))
- }
- if err != nil {
- panic(err)
- }
- return a
- }
- func verifyActiveDate(from *time.Time, to *time.Time) bool {
- now := time.Now()
- if from != nil && now.Before(*from) {
- return false
- }
- if to != nil && now.After(*to) {
- return false
- }
- return true
- }
- func verifyActiveDateStr(startDate *string, endDate *string) bool {
- from := parseDate(startDate)
- to := parseDate(endDate)
- return verifyActiveDate(from, to)
- }
- func printStackTrace(err error) {
- if viper.GetBool("stacktrace") {
- buf := make([]byte, 10240)
- n := runtime.Stack(buf, false)
- log.Printf("%s\n%s\n", err, buf[:n])
- }
- }
- func decodeRemoteAddr(r *http.Request) string {
- strp := regexp.MustCompile(":[0-9]+$") // strip :port
- for _, h := range []string{"X-Forwarded-For", "X-Real-Ip"} {
- addresses := strings.Split(r.Header.Get(h), ",")
- // march from right to left until we get a public address
- // that will be the address right before our proxy.
- for i := len(addresses) - 1; i >= 0; i-- {
- ip := strings.TrimSpace(addresses[i])
- // header can contain spaces too, strip those out.
- realIP := net.ParseIP(ip)
- if !realIP.IsGlobalUnicast() {
- // bad address, go to next
- continue
- }
- return strp.ReplaceAllLiteralString(ip, "")
- }
- }
- return strp.ReplaceAllLiteralString(r.RemoteAddr, "")
- /* // old version, did not handle ipv6
- xff := r.Header.Get("X-Forwarded-For")
- ra := r.RemoteAddr // might be ipv6: [::1]:47016!
- ips := strings.Split(ra, ":")
- ra = ips[0]
- if xff != "" {
- ips = strings.Split(xff, ", ")
- return fmt.Sprintf("%s(%s)", ips[0], ra)
- } else {
- return ra
- }
- */
- }
- func logError(w http.ResponseWriter, r *http.Request, err error) {
- gDB.CurrentCounters.Errors++
- t := time.Now().Format(isoDateTimeFormat)
- fmt.Fprintf(errorLog, "%s %s %s %s: %s\n", t, decodeRemoteAddr(r), r.Method, r.URL.Path, err)
- fmt.Fprintln(w, err.Error())
- printStackTrace(err)
- errorLog.Sync()
- }
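- // fetchHandler serves the fetch-prefix route (/g/<accessid> by default): it checks
- // that the access handler and its source are both inside their active date window,
- // bumps the fetch counter asynchronously, then either redirects to the source URL
- // or streams the stored binary.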
- func fetchHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- //accessIDStr := r.URL.Path[len(viper.GetString("fetch-prefix")):]
- accessIDStr := pat.Param(ctx, "accessid") //r.URL.Path
- accessID := uuidDec(accessIDStr)
- var ah *DBAHandler
- var s *DBSource
- err := gDB.DB.View(func(tx *bolt.Tx) error {
- ah = getAccessOrPanic(tx, accessID)
- /*
- ah, err = dbGetAccess(tx, accessID)
- if err != nil {
- return err
- }
- */
- if !verifyActiveDate(ah.StartDate, ah.EndDate) {
- return fmt.Errorf("Access ID not active")
- }
- s = getSourceOrPanic(tx, ah.SourceID)
- if !verifyActiveDate(s.StartDate, s.EndDate) {
- return fmt.Errorf("Source not active")
- }
- if s.URL == nil && s.Sha256 == nil {
- return fmt.Errorf("Source is missing Sha256")
- }
- return nil
- })
- if err != nil {
- logError(w, r, err)
- //sourceLog(ah.SourceID, r, err.Error())
- return
- }
- gDB.CurrentCounters.Fetches++
- go dbIncrementAccess(ah.AccessID) // offline, will be in own goroutine
- if s.URL != nil {
- http.Redirect(w, r, *s.URL, http.StatusSeeOther)
- sourceLog(ah.SourceID, r, "redirected: "+*s.URL)
- } else {
- err = sendRawFile(w, *s.Sha256, s.MimeType, s.FileName) // view does not block? so ok to do this in tx
- if err != nil {
- logError(w, r, err)
- sourceLog(ah.SourceID, r, err.Error())
- return
- }
- sourceLog(ah.SourceID, r, "fetched")
- }
- }
- func simpleFileName(s string) string {
- reg, err := regexp.Compile("[^A-Za-z0-9.-]")
- if err != nil {
- return "file.xxx"
- }
- return reg.ReplaceAllString(s, "_")
- }
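- // sendRawFile streams the stored blob for the given SHA-256 to the client,
- // setting Content-Type and a Content-Disposition attachment header (with both a
- // sanitized and a UTF-8 encoded file name) when they are known.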
- func sendRawFile(w http.ResponseWriter, sha []byte, mimeType *string, fileName *string) error {
- f, err := os.Open(constructPath(sha))
- if err != nil {
- return err
- }
- defer f.Close()
- if mimeType != nil {
- w.Header().Set("Content-Type", *mimeType)
- // w.Header().Set("Content-Type", "application/octet-stream") //forces the save as dialog
- }
- //Disposition makes the browser to offer save-as with the given file name, instead of displaying it
- if fileName != nil /*&& filenameRegexp.MatchString(*filename)*/ {
- w.Header().Set("Content-Disposition", "attachment; "+
- "filename=\""+simpleFileName(*fileName)+"\"; "+
- "filename*=UTF-8''"+strings.Replace(url.QueryEscape(*fileName), "+", "%20", -1))
- // w.Header().Set("Content-Disposition", "attachment; filename*=UTF-8''"+url.QueryEscape(*fileName))
- // w.Header().Set("Content-Disposition", "attachment; filename*=UTF-8''"+*fileName)
- }
- io.Copy(w, f)
- return nil
- }
- func rawHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- sum := r.URL.Path[len("/raw/"):]
- sha := dec(sum)
- //fmt.Println(sum)
- if err := sendRawFile(w, sha, nil, nil); err != nil {
- logError(w, r, err)
- }
- }
- /* Catch "panic" and generate a "BadRequest" */
- func catchPanic(ctx context.Context, w http.ResponseWriter, r *http.Request, sendDetails bool) {
- if err := recover(); err != nil {
- t := time.Now().Format(isoDateTimeFormat)
- fmt.Fprintf(errorLog, "%s %s %s %s: %s\n", t, decodeRemoteAddr(r), r.Method, r.URL.Path, err)
- gDB.CurrentCounters.Errors++
- if sendDetails {
- http.Error(w, fmt.Sprintf("%s", err), http.StatusInternalServerError)
- } else {
- http.Error(w, "Internal error", http.StatusInternalServerError)
- }
- if viper.GetBool("stacktrace") {
- buf := make([]byte, 1024)
- n := runtime.Stack(buf, false)
- log.Printf("%s\n%s\n", err, buf[:n])
- }
- //panic(err)
- }
- }
- func logRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, started time.Time) {
- //println(r.Method, r.URL.Path)
- now := time.Now()
- t := now.Format(isoDateTimeFormat)
- //if _, ok := *w.(ResponseWriterProxy); ok {
- fmt.Fprintf(requestLog, "%s %s %s %s (%s)\n",
- t, decodeRemoteAddr(r), r.Method, r.URL.Path, now.Sub(started))
- //} else { // normal ResponseWriter, no more info
- //fmt.Fprintf(requestLog, "%s %s %s %s\n", t, decodeRemoteAddr(r), r.Method, r.URL.Path)
- //}
- requestLog.Sync()
- }
- //http://grokbase.com/t/gg/golang-nuts/154v03yk9y/go-nuts-http-response-status-in-a-http-handler
- func logging(h goji.Handler) goji.Handler {
- return goji.HandlerFunc(func(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- //xw := modwriter{ResponseWriter: w} // create a wrapping
- started := time.Now()
- defer logRequest(ctx, w, r, started)
- defer catchPanic(ctx, w, r, true)
- h.ServeHTTPC(ctx, w, r)
- })
- }
- func sourceLog(sourceID uuid.UUID, r *http.Request, logStr string) {
- go func() {
- logFileName := constructLogPath(sourceID)
- // log.Println("sourceLog", logFileName, logStr)
- var logStrFinal string
- if r != nil {
- logStrFinal = fmt.Sprintf("%s | %s | %s | %s", decodeRemoteAddr(r), r.Method, r.URL.Path, logStr)
- } else {
- logStrFinal = logStr
- }
- logFile, err := os.OpenFile(logFileName, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
- if err != nil {
- log.Println("Source log error:", uuidEnc(sourceID), logStrFinal)
- } else {
- // fmt.Println(time.Now().Format(isoDateTimeFormat), "|", logStrFinal)
- fmt.Fprintln(logFile, time.Now().Format(isoDateTimeFormat), "|", logStrFinal)
- logFile.Close()
- }
- }()
- }
- func refreshStats(db *DaisyDB) {
- //gDB.DBStats = gDB.DB.Stats()
- stats := db.DB.Stats()
- gDB.DBStatsDiff = stats.Sub(&gDB.DBStats)
- gDB.DBStats = stats
- }
- func wuiHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- switch r.Method {
- case "GET":
- switch pat.Param(ctx, "cmd") {
- case "stats":
- //refreshStats(gDB)
- now := time.Now().Round(time.Second)
- gDB.CurrentCounters.LastUpdate = now
- gDB.Uptime = now.Sub(gDB.ServerStartTime).String()
- w.Header().Set("Content-Type", contentTypeJSON)
- if b, err := json.Marshal(gDB); err != nil {
- log.Println("Marshal failed for stats:", err)
- w.WriteHeader(http.StatusInternalServerError)
- } else {
- var out bytes.Buffer
- json.Indent(&out, b, "", " ")
- out.WriteTo(w) // returns n, err
- }
- default:
- logError(w, r, fmt.Errorf("Invalid wui request %s -> %s", r.URL.Path, pat.Param(ctx, "cmd")))
- }
- }
- }
- func sourcePostNewHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- var result JSONSource
- var inputJ JSONSource
- var input *DBSource
- var current *DBSource
- b, err := ioutil.ReadAll(r.Body)
- if err != nil {
- panic(err)
- }
- //fmt.Printf("body: %s\n", b)
- if err = json.Unmarshal(b, &inputJ); err != nil {
- panic(err)
- }
- if inputJ.APIKey == nil {
- logError(w, r, fmt.Errorf("Unauthorized access"))
- return
- }
- if setupAPIKey != "" && strings.Compare(*inputJ.APIKey, setupAPIKey) != 0 {
- logError(w, r, fmt.Errorf("Unauthorized access: %s", *inputJ.APIKey))
- return
- }
- input = inputJ.DBStruct()
- // create a new source
- current = input // start with what the input gave us
- current.Binary = false
- current.SourceID = uuid.Must(uuid.NewV4())
- if current.StartDate == nil {
- now := time.Now()
- current.StartDate = &now
- }
- if current.EndDate == nil {
- nowLater := current.StartDate.AddDate(0, viper.GetInt("default-end-months"), -1)
- current.EndDate = &nowLater
- }
- //if current.FileName != nil && !filenameRegexp.MatchString(current.FileName) {
- //logError(w, r, fmt.Errorf("Invalid filename: "+current.FileName))
- //return
- //}
- // either FileName and MimeType or URL must be given
- if current.URL == nil && (current.FileName == nil || current.MimeType == nil) {
- logError(w, r, fmt.Errorf("Either url or file-name and mime-type must be given"))
- return
- }
- err = gDB.DB.Batch(func(tx *bolt.Tx) error {
- err = dbUpsertSource(tx, "insert", *current)
- sourceLog(current.SourceID, r, "created")
- gDB.CurrentCounters.Sources++
- if err != nil {
- return err
- }
- return extendAccessHandlers(tx, current)
- })
- if err != nil {
- logError(w, r, err)
- return
- }
- result = *current.JSONStruct()
- w.Header().Set("Content-Type", contentTypeJSON)
- b, err = json.Marshal(result)
- if err != nil {
- log.Println("Marshal failed", result, err)
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- var out bytes.Buffer
- json.Indent(&out, b, "", " ")
- out.WriteTo(w) // returns n, err
- }
- func sourceGetBinaryHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- sourceIDStr := pat.Param(ctx, "sourceid")
- err := gDB.DB.View(func(tx *bolt.Tx) error {
- source := getSourceOrPanic(tx, uuidDec(sourceIDStr))
- if source.Sha256 != nil {
- return sendRawFile(w, *source.Sha256, source.MimeType, source.FileName)
- }
- return fmt.Errorf("Not a binary")
- })
- if err != nil {
- logError(w, r, err)
- }
- }
- func sourceGetLog(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- sourceIDStr := pat.Param(ctx, "sourceid")
- sourceID := uuidDec(sourceIDStr)
- logFile, err := os.Open(constructLogPath(sourceID))
- // fmt.Println("sourceGetLog", logFile.Name())
- if err != nil {
- logError(w, r, err)
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- defer logFile.Close()
- w.Header().Set("Content-Type", "text/plain")
- io.Copy(w, logFile)
- }
- func sourceGetHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- var result JSONSource
- var current *DBSource
- sourceIDStr := pat.Param(ctx, "sourceid")
- var sourceID uuid.UUID
- if sourceIDStr != "" {
- sourceID = uuidDec(sourceIDStr)
- }
- err := gDB.DB.View(func(tx *bolt.Tx) error {
- current = getSourceOrPanic(tx, sourceID)
- return nil
- })
- if err != nil {
- logError(w, r, err)
- return
- }
- result = *current.JSONStruct()
- w.Header().Set("Content-Type", contentTypeJSON)
- b, err := json.Marshal(result)
- if err != nil {
- log.Println("Marshal failed", result, err)
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- var out bytes.Buffer
- json.Indent(&out, b, "", " ")
- out.WriteTo(w) // returns n, err
- }
- func sourcePostHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- var result JSONSource
- var inputJ JSONSource
- var input *DBSource
- var current *DBSource
- sourceIDStr := pat.Param(ctx, "sourceid")
- var sourceID uuid.UUID
- if sourceIDStr != "" {
- sourceID = uuidDec(sourceIDStr)
- }
- b, err := ioutil.ReadAll(r.Body)
- if err != nil {
- panic(err)
- }
- //fmt.Printf("body: %s\n", b)
- if err = json.Unmarshal(b, &inputJ); err != nil {
- panic(err)
- }
- if inputJ.APIKey == nil {
- logError(w, r, fmt.Errorf("Unauthorized access"))
- return
- }
- if setupAPIKey != "" && strings.Compare(*inputJ.APIKey, setupAPIKey) != 0 {
- logError(w, r, fmt.Errorf("Unauthorized access: %s", *inputJ.APIKey))
- return
- }
- input = inputJ.DBStruct()
- err = gDB.DB.Batch(func(tx *bolt.Tx) error {
- current = getSourceOrPanic(tx, sourceID)
- if input.NumHandlers > 500 {
- return fmt.Errorf("Too many access handlers: %d", input.NumHandlers)
- }
- if current.NumHandlers < input.NumHandlers {
- current.NumHandlers = input.NumHandlers
- }
- if input.StartDate != nil {
- current.StartDate = input.StartDate
- }
- if input.EndDate != nil {
- current.EndDate = input.EndDate
- }
- if len(input.AccessHandlers) != 0 {
- for _, ah := range input.AccessHandlers {
- a := getAccessOrPanic(tx, ah.AccessID)
- if ah.StartDate != nil {
- a.StartDate = ah.StartDate
- }
- if ah.EndDate != nil {
- a.EndDate = ah.EndDate
- }
- err = dbUpsertAccess(tx, "update", *a)
- if err != nil {
- logError(w, r, fmt.Errorf("Could not update access object %s", uuidEnc(ah.AccessID)))
- }
- }
- }
- err = dbUpsertSource(tx, "update", *current)
- if err != nil {
- return fmt.Errorf("Could not update source: %s", err)
- }
- sourceLog(current.SourceID, r, "updated")
- return extendAccessHandlers(tx, current)
- })
- if err != nil {
- logError(w, r, err)
- return
- }
- result = *current.JSONStruct()
- w.Header().Set("Content-Type", contentTypeJSON)
- b, err = json.Marshal(result)
- if err != nil {
- log.Println("Marshal failed", result, err)
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- var out bytes.Buffer
- json.Indent(&out, b, "", " ")
- out.WriteTo(w) // returns n, err
- }
- func calculateHashForFile(fileName string) []byte {
- f, err := os.Open(fileName)
- if err != nil {
- panic(err)
- }
- defer f.Close()
- hash := sha256.New()
- io.Copy(hash, f)
- return hash.Sum(nil) // []byte
- }
- func sourcePutBinaryHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- var result JSONSource
- var current *DBSource
- sourceIDStr := pat.Param(ctx, "sourceid")
- var sourceID uuid.UUID
- if sourceIDStr != "" {
- sourceID = uuidDec(sourceIDStr)
- }
- //NOTE: PUT always needs client provided IDs and always overwrites (send full contents).
- //Hence the only useful/allowed usage here is uploading binaries!
- f, err := ioutil.TempFile(storePath, "tmp") // + "hej"
- if err != nil {
- logError(w, r, err)
- return
- }
- tmpFileName := f.Name()
- _, err = io.Copy(f, r.Body)
- if err != nil {
- logError(w, r, err)
- return
- }
- f.Close()
- md := calculateHashForFile(tmpFileName)
- //mdSum := enc(md)
- err = gDB.DB.Batch(func(tx *bolt.Tx) error {
- current = getSourceOrPanic(tx, sourceID)
- if current.Sha256 == nil {
- current.Sha256 = &md
- }
- if bytes.Equal(md, *current.Sha256) {
- //fmt.Printf("Sum: %v\n", mdSum)
- err := os.Rename(tmpFileName, constructPath(md))
- if err != nil {
- return err
- }
- //fmt.Printf("## Renamed: %s to %s\n", tmpFileName, constructPath(md))
- // define binary as uploaded to the result (and update the database accordingly)
- current.Binary = true
- sourceLog(current.SourceID, r, "updated")
- return dbUpsertSource(tx, "update", *current)
- }
- fmt.Printf("Removing " + tmpFileName)
- os.Remove(tmpFileName)
- //w.WriteHeader(http.StatusBadRequest)
- return fmt.Errorf("Missmatch SHA256 sum, expected %s", *current.Sha256)
- })
- if err != nil {
- logError(w, r, err)
- return
- }
- result = *current.JSONStruct()
- w.Header().Set("Content-Type", contentTypeJSON)
- b, err := json.Marshal(result)
- if err != nil {
- log.Println("Marshal failed", result, err)
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- var out bytes.Buffer
- json.Indent(&out, b, "", " ")
- out.WriteTo(w)
- }
- // upload logic (CURRENTLY NOT USED)
- /*
- func formUpload(ctx context.Context, w http.ResponseWriter, r *http.Request) {
- fmt.Println("method:", r.Method)
- //if r.Method == "GET" {
- //crutime := time.Now().Unix()
- //h := md5.New()
- //io.WriteString(h, strconv.FormatInt(crutime, 10))
- //token := fmt.Sprintf("%x", h.Sum(nil))
- //t, _ := template.ParseFiles("upload.gtpl")
- //t.Execute(w, token)
- //} else {
- r.ParseMultipartForm(32 << 20)
- file, handler, err := r.FormFile("binary")
- if err != nil {
- log.Fatal(err)
- return
- }
- fmt.Println("binary=", file)
- defer file.Close()
- //fmt.Fprintf(w, "Header: %v", handler.Header)
- fmt.Printf("Header: %#v\n", handler.Header)
- fmt.Printf("Disposition: %v\n", handler.Header["Content-Disposition"][0])
- fmt.Printf("Content Type: %v\n", handler.Header["Content-Type"][0])
- f, err := os.OpenFile("./test/"+handler.Filename, os.O_WRONLY|os.O_CREATE, 0666)
- if err != nil {
- log.Fatal(err)
- return
- }
- defer f.Close()
- io.Copy(f, file)
- u4 := uuid.NewV4()
- fmt.Fprintf(w, `{"source":"%s"}\n`, u4)
- //}
- }
- */
- /*
- This function is executed as a go routine.
- It is a simple scheduler for periodic work, such as backup.
- */
- func periodicExecution(db *DaisyDB) {
- time.Sleep(5 * time.Second)
- for {
- log.Println("Periodic execution start")
- log.Print(backupDB(db))
- log.Println("Expire binaries")
- log.Print(expireBinaries(db))
- log.Println("Periodic execution ends")
- nextInterval := viper.GetDuration("backup-interval")
- //log.Println("Next execution: "+nextInterval)
- time.Sleep(nextInterval) //8 * time.Hour)
- }
- }
- func pulseExecution(db *DaisyDB) {
- //previousStats := db.DB.Stats()
- for {
- log.Println("Pulse")
- db.CurrentCounters.LastUpdate = time.Now().Round(time.Second)
- db.CountersDiff = db.CurrentCounters.Sub(db.previousCounters)
- db.previousCounters = db.CurrentCounters
- nextInterval := viper.GetDuration("pulse-interval")
- //log.Println("Next execution: "+nextInterval)
- refreshStats(db)
- //stats := db.DB.Stats()
- //diff := stats.Sub(&previousStats)
- ////json.NewEncoder(os.Stdout).Encode(diff)
- //db.DBStats = stats
- time.Sleep(nextInterval)
- }
- }
- func catchCtrlC(db *bolt.DB) {
- c := make(chan os.Signal, 1)
- signal.Notify(c, os.Interrupt)
- go func() {
- for sig := range c {
- // sig is a ^C, handle it
- log.Printf("Caught interrupt %#v\n", sig)
- db.Close()
- log.Println("Database closed. Exiting.")
- os.Exit(0)
- }
- }()
- }
- func openLogFiles() {
- requestLogFname := "request.log"
- errorLogFname := "error.log"
- var err error
- os.Mkdir(logsPath, pathMode)
- requestLog, err = os.OpenFile(path.Join(logsPath, requestLogFname), os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
- if err != nil {
- log.Fatal(err)
- }
- errorLog, err = os.OpenFile(path.Join(logsPath, errorLogFname), os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
- if err != nil {
- log.Fatal(err)
- }
- }
- func listenHTTP() {
- //if *APIKey == "" {
- //log.Println("*** You need to give an APIkey!")
- //flag.PrintDefaults()
- //return
- //}
- portStr := strconv.Itoa(viper.GetInt("port"))
- prefix := viper.GetString("prefix")
- fetchPrefix := viper.GetString("fetch-prefix")
- uuidStyle = viper.GetString("uuid-style")
- setupAPIKey = viper.GetString("api-key")
- log.Println("Port: " + portStr)
- log.Println("ServerURL: " + viper.GetString("server"))
- log.Println("PathPrefix: " + prefix)
- log.Println("uuidStyle: " + uuidStyle)
- mux := goji.NewMux()
- //http.HandleFunc(prefix+"/source/", http.StripPrefix(prefix, sourceHandler))
- //http.HandleFunc(prefix+"/source/", sourceHandler)
- //http.HandleFunc(prefix+"/access/", accessHandler)
- //http.HandleFunc(prefix+fetchPrefix, fetchHandler)
- //http.HandleFunc(prefix+"/upload", formUpload)
- //mux.HandleFuncC(pat.Get(prefix+"/wui/:cmd"), wuiHandler)
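- // Route overview (assuming an empty prefix and the default fetch-prefix "/g/"):
- //   POST /source/                  create a source (JSON body, requires apikey)
- //   GET  /source/:sourceid         read source metadata
- //   POST /source/:sourceid         update dates / number of access handlers
- //   GET  /source/:sourceid/log     plain-text access log of a source
- //   PUT  /source/:sourceid/binary  upload the binary payload
- //   GET  /source/:sourceid/binary  download the binary directly
- //   GET  /g/:accessid              fetch via an access handler
- //   GET  /raw/<sha256>             fetch a stored blob by hash
- //   GET  /wui/stats                server statistics as JSON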
- mux.UseC(logging) // middleware logging
- mux.HandleFuncC(pat.Get(prefix+"/wui/:cmd"), wuiHandler)
- mux.HandleFuncC(pat.Post(prefix+"/source/"), sourcePostNewHandler)
- mux.HandleFuncC(pat.Get(prefix+"/source/:sourceid"), sourceGetHandler)
- mux.HandleFuncC(pat.Get(prefix+"/source/:sourceid/log"), sourceGetLog)
- mux.HandleFuncC(pat.Post(prefix+"/source/:sourceid"), sourcePostHandler)
- mux.HandleFuncC(pat.Put(prefix+"/source/:sourceid/binary"), sourcePutBinaryHandler)
- mux.HandleFuncC(pat.Get(prefix+"/source/:sourceid/binary"), sourceGetBinaryHandler)
- mux.HandleFuncC(pat.Get(prefix+fetchPrefix+":accessid"), fetchHandler)
- mux.HandleFuncC(pat.Get(prefix+"/raw/*"), rawHandler)
- mux.Handle(pat.Get(prefix+"/*"), http.FileServer(http.Dir("static")))
- //mux.HandleC(pat.Get(prefix+"/"), logging("static", goji.FileServer(http.Dir("static"))))
- //err := http.ListenAndServeTLS(":10443", "cert.pem", "key.pem", nil)
- //if err := http.ListenAndServe(":"+portStr, nil); err != nil {
- if err := http.ListenAndServe(":"+portStr, mux); err != nil {
- log.Fatal(err)
- }
- }
- func prepareDatabase() (*DaisyDB, error) {
- os.Mkdir(storePath, pathMode)
- var db DaisyDB
- dbFileName := path.Join(storePath, viper.GetString("dbfile"))
- log.Println("Opening DB: " + dbFileName)
- lDB, err := bolt.Open(dbFileName, 0600, &bolt.Options{Timeout: 5 * time.Second})
- if err != nil {
- return nil, fmt.Errorf("Opening %s: %s", dbFileName, err)
- }
- db.ServerStartTime = time.Now().Round(time.Second)
- db.DB = lDB
- if err := initDB(db.DB); err != nil {
- return nil, err
- }
- db.DBStats = db.DB.Stats()
- countObjects(&db)
- catchCtrlC(db.DB)
- return &db, nil
- }
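- // Configuration is read from ./daisy.<ext> in any format viper understands (for
- // example daisy.yaml). Keys with defaults: port, uuid-style, fetch-prefix,
- // dump-file, backup-interval, pulse-interval, store, logs, dbfile,
- // default-end-months, retention-days, stacktrace; server, prefix and api-key
- // have no default and should normally be set.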
- func setupViper() {
- viper.SetConfigName("daisy") // name of config file (without extension)
- viper.AddConfigPath(".") //current
- viper.SetDefault("port", 8090)
- viper.SetDefault("uuid-style", "base64")
- viper.SetDefault("fetch-prefix", "/g/")
- viper.SetDefault("dump-file", "db_dump_current.tsv")
- viper.SetDefault("backup-interval", time.Duration(8*time.Hour))
- viper.SetDefault("pulse-interval", time.Duration(5*time.Minute))
- viper.SetDefault("store", "store")
- viper.SetDefault("logs", "logs")
- viper.SetDefault("dbfile", "daisy.bolt.db")
- viper.SetDefault("default-end-months", 3)
- viper.SetDefault("retention-days", 7)
- viper.SetDefault("stacktrace", false)
- if err := viper.ReadInConfig(); err == nil {
- log.Println("Using config file: " + viper.ConfigFileUsed())
- } else {
- panic(fmt.Errorf("could not load config file: %s", err))
- }
- urlPathPrefix = viper.GetString("server") + viper.GetString("prefix") + viper.GetString("fetch-prefix")
- storePath = viper.GetString("store")
- logsPath = viper.GetString("logs")
- }
- func main() {
- log.Println("--== Daisy ==--")
- log.Println("2016 J Ramb")
- log.Println("Version:", daisyVersion)
- flag.Parse()
- args := flag.Args()
- //if cfgFile != "" { // enable ability to specify config file via flag
- //viper.SetConfigFile(cfgFile)
- //}
- setupViper()
- openLogFiles()
- var err error
- if gDB, err = prepareDatabase(); err != nil {
- log.Fatal(err)
- }
- defer func() {
- gDB.DB.Close()
- log.Println("Database closed")
- }()
- var command string
- if len(args) > 0 {
- command = args[0]
- } else {
- command = "server" // default
- }
- log.Println("Command line command:", command)
- switch command {
- case "server":
- gDB.dbNeedsBackup = true
- go periodicExecution(gDB) // simple periodic execution
- go pulseExecution(gDB) // simple periodic execution
- listenHTTP()
- case "backup":
- {
- gDB.dbNeedsBackup = true
- log.Println(backupDB(gDB))
- }
- case "dump":
- err = databaseDump(gDB, "")
- if err != nil {
- log.Println(err)
- }
- default:
- log.Println("Invalid cmd:", command)
- }
- }