logger pkg

This commit is contained in:
parent b9c4553577
commit a17926f54b
@@ -7,7 +7,7 @@ import (
 "path"

 "gitbase.de/apairon/mark2web/pkg/filter"
-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "gitbase.de/apairon/mark2web/pkg/mark2web"
 )
@@ -25,7 +25,7 @@ func main() {
 outDir := flag.String("out", "html", "output directory")
 createOutDir := flag.Bool("create", false, "create output directory if not existing")
 //clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
-logLevel := flag.String("logLevel", "info", "log level: debug, info, warning, error")
+logLevel := flag.String("logLevel", "notice", "log level: debug, info, notice, warning, error")
 version := flag.Bool("version", false, "print version of this executable")

 flag.Parse()
@@ -37,60 +37,60 @@ func main() {
 os.Exit(0)
 }

-level := "info"
+level := "notice"
 if logLevel != nil {
 level = *logLevel
 }
-helper.ConfigureLogger(level)
+logger.SetLogLevel(level)

 if inDir == nil || *inDir == "" {
-helper.Log.Panic("input directory not specified")
+logger.Log.Panic("input directory not specified")
 }
 iDir := path.Clean(*inDir)
 inDir = &iDir
-helper.Log.Infof("input directory: %s", *inDir)
+logger.Log.Infof("input directory: %s", *inDir)

 if outDir == nil || *outDir == "" {
-helper.Log.Panic("output directory not specified")
+logger.Log.Panic("output directory not specified")
 }
 oDir := path.Clean(*outDir)
 outDir = &oDir
-helper.Log.Infof("output directory: %s", *outDir)
+logger.Log.Infof("output directory: %s", *outDir)

 if createOutDir != nil && *createOutDir {
 if _, err := os.Stat(*outDir); os.IsNotExist(err) {
-helper.Log.Debugf("output directory '%s' does not exist", *outDir)
-helper.Log.Debugf("trying to create output directory: %s", *outDir)
+logger.Log.Debugf("output directory '%s' does not exist", *outDir)
+logger.Log.Debugf("trying to create output directory: %s", *outDir)
 err := os.MkdirAll(*outDir, 0755)
 if err != nil {
-helper.Log.Panic(err)
+logger.Log.Panic(err)
 }
-helper.Log.Noticef("created output directory: %s", *outDir)
+logger.Log.Noticef("created output directory: %s", *outDir)
 } else {
-helper.Log.Noticef("output directory '%s' already exists", *outDir)
+logger.Log.Noticef("output directory '%s' already exists", *outDir)
 }
 }

 if fD, err := os.Stat(*outDir); os.IsNotExist(err) {
-helper.Log.Panicf("output directory '%s' does not exist, try -create parameter or create manually", *outDir)
+logger.Log.Panicf("output directory '%s' does not exist, try -create parameter or create manually", *outDir)
 } else {
 if fD == nil {
-helper.Log.Panicf("something went wrong, could not get file handle for output dir %s", *outDir)
+logger.Log.Panicf("something went wrong, could not get file handle for output dir %s", *outDir)
 } else if !fD.IsDir() {
-helper.Log.Panicf("output directory '%s' is not a directory", *outDir)
+logger.Log.Panicf("output directory '%s' is not a directory", *outDir)
 }
 }

-helper.Log.Debug("reading global config...")
+logger.Log.Debug("reading global config...")
 configFilename := *inDir + "/config.yml"
 err := mark2web.Config.ReadFromFile(configFilename)
 if err != nil {
-helper.Log.Panicf("could not read file '%s': %s", configFilename, err)
+logger.Log.Panicf("could not read file '%s': %s", configFilename, err)
 }
 mark2web.Config.Directories.Input = *inDir
 mark2web.Config.Directories.Output = *outDir

-helper.Log.Debugf("reading input directory %s", *inDir)
+logger.Log.Debugf("reading input directory %s", *inDir)

 defaultTemplate := "base.html"
 defaultInputFile := "README.md"
@@ -5,7 +5,7 @@ import (
 "path"
 "strings"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "gitbase.de/apairon/mark2web/pkg/mark2web"
 "github.com/ddliu/motto"
 "github.com/flosch/pongo2"
@@ -16,7 +16,7 @@ import (
 func RegisterFilters(dir string) {
 files, err := ioutil.ReadDir(dir)
 if err != nil {
-helper.Log.Panicf("could not read from template filters dir '%s': %s", dir, err)
+logger.Log.Panicf("could not read from template filters dir '%s': %s", dir, err)
 }
 for _, f := range files {
 if !f.IsDir() {
@@ -24,7 +24,7 @@ func RegisterFilters(dir string) {
 case ".js":
 fileBase := strings.TrimSuffix(f.Name(), ".js")
 jsFile := dir + "/" + f.Name()
-helper.Log.Debugf("trying to register filter from: %s", jsFile)
+logger.Log.Debugf("trying to register filter from: %s", jsFile)
 /*
 jsStr, err := ioutil.ReadFile(jsFile)
 if err != nil {
@@ -34,10 +34,10 @@ func RegisterFilters(dir string) {
 vm := motto.New()
 fn, err := vm.Run(jsFile)
 if err != nil {
-helper.Log.Panicf("error in javascript vm for '%s': %s", jsFile, err)
+logger.Log.Panicf("error in javascript vm for '%s': %s", jsFile, err)
 }
 if !fn.IsFunction() {
-helper.Log.Panicf("%s does not contain a function code", jsFile)
+logger.Log.Panicf("%s does not contain a function code", jsFile)
 }

 err = pongo2.RegisterFilter(
@@ -49,21 +49,21 @@ func RegisterFilters(dir string) {
 }

 if err != nil {
-helper.Log.Panicf("could not set context as in '%s': %s", jsFile, err)
+logger.Log.Panicf("could not set context as in '%s': %s", jsFile, err)
 }
 ret, err := fn.Call(thisObj.Value(), in.Interface(), param.Interface())
 if err != nil {
-helper.Log.Panicf("error in javascript file '%s' while calling returned function: %s", jsFile, err)
+logger.Log.Panicf("error in javascript file '%s' while calling returned function: %s", jsFile, err)
 }
 retGo, err := ret.Export()
 if err != nil {
-helper.Log.Panicf("export error for '%s': %s", jsFile, err)
+logger.Log.Panicf("export error for '%s': %s", jsFile, err)
 }
 return pongo2.AsValue(retGo), nil
 },
 )
 if err != nil {
-helper.Log.Panicf("could not register filter from '%s': %s", jsFile, err)
+logger.Log.Panicf("could not register filter from '%s': %s", jsFile, err)
 }

 }
@@ -13,6 +13,7 @@ import (
 "strings"

 "gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "gitbase.de/apairon/mark2web/pkg/mark2web"
 "github.com/disintegration/imaging"
 "github.com/flosch/pongo2"
@@ -149,7 +150,7 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *

 pt := path.Dir(imgTarget)
 if _, err := os.Stat(pt); os.IsNotExist(err) {
-helper.Log.Infof("create image target dir: %s", pt)
+logger.Log.Infof("create image target dir: %s", pt)
 if err := os.MkdirAll(pt, 0755); err != nil {
 return nil, &pongo2.Error{
 Sender: "filter:image_resize",
@@ -165,16 +166,16 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
 }

 if f, err := os.Stat(imgTarget); err == nil && !f.IsDir() {
-helper.Log.Noticef("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
+logger.Log.Noticef("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
 } else {
 mark2web.ThreadStart(func() {
-helper.Log.Noticef("processing image from %s to %s", imgSource, imgTarget)
+logger.Log.Noticef("processing image from %s to %s", imgSource, imgTarget)
 if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
 // webrequest before finding target filename, because of file format in filename
 } else {
 img, err = imaging.Open(imgSource, imaging.AutoOrientation(true))
 if err != nil {
-helper.Log.Panicf("filter:image_resize, could not open image '%s': %s", imgSource, err)
+logger.Log.Panicf("filter:image_resize, could not open image '%s': %s", imgSource, err)
 }
 }

@@ -207,11 +208,11 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
 case "bottomright":
 anchor = imaging.BottomRight
 default:
-helper.Log.Panicf("filter:image_resize, unknown anchor a=%s definition", p.Anchor)
+logger.Log.Panicf("filter:image_resize, unknown anchor a=%s definition", p.Anchor)
 }
 img = imaging.Fill(img, p.Width, p.Height, anchor, imaging.Lanczos)
 default:
-helper.Log.Panicf("filter:image_resize, invalid p parameter '%s'", p.Process)
+logger.Log.Panicf("filter:image_resize, invalid p parameter '%s'", p.Process)
 }

 var encodeOptions = make([]imaging.EncodeOption, 0)
@@ -221,9 +222,9 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *

 err = imaging.Save(img, imgTarget, encodeOptions...)
 if err != nil {
-helper.Log.Panicf("filter:image_resize, could save image '%s': %s", imgTarget, err)
+logger.Log.Panicf("filter:image_resize, could save image '%s': %s", imgTarget, err)
 }
-helper.Log.Noticef("finished image: %s", imgTarget)
+logger.Log.Noticef("finished image: %s", imgTarget)
 })
 }
 return pongo2.AsValue(mark2web.CurrentTreeNode.ResolveNavPath(p.Filename)), nil
@@ -2,25 +2,27 @@ package helper

 import (
 "os"
+
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 // CreateDirectory creates direcory with all missing parents and panic if error
 func CreateDirectory(dir string) {
-Log.Debugf("trying to create output directory: %s", dir)
+logger.Log.Debugf("trying to create output directory: %s", dir)

 if dirH, err := os.Stat(dir); os.IsNotExist(err) {
 err := os.MkdirAll(dir, 0755)
 if err != nil {
-Log.Panicf("could not create output directory '%s': %s", dir, err)
+logger.Log.Panicf("could not create output directory '%s': %s", dir, err)
 }
-Log.Noticef("created output directory: %s", dir)
+logger.Log.Noticef("created output directory: %s", dir)
 } else if dirH != nil {
 if dirH.IsDir() {
-Log.Noticef("output directory '%s' already exists", dir)
+logger.Log.Noticef("output directory '%s' already exists", dir)
 } else {
-Log.Panicf("output directory '%s' is no directory", dir)
+logger.Log.Panicf("output directory '%s' is no directory", dir)
 }
 } else {
-Log.Panicf("unknown error for output directory '%s': %s", dir, err)
+logger.Log.Panicf("unknown error for output directory '%s': %s", dir, err)
 }
 }
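For orientation, a minimal sketch of how the reworked helper is called after this change. The import paths, helper.CreateDirectory, and logger.SetLogLevel come from the hunks above; the directory name is only an example.

package main

import (
    "gitbase.de/apairon/mark2web/pkg/helper"
    "gitbase.de/apairon/mark2web/pkg/logger"
)

func main() {
    // raise verbosity so the Debugf/Noticef calls inside CreateDirectory are visible
    logger.SetLogLevel("debug")

    // "html/assets" is only an example path; CreateDirectory panics via logger.Log on failure
    helper.CreateDirectory("html/assets")
}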
@@ -5,27 +5,29 @@ import (
 "io/ioutil"
 "net/http"
 "strings"
+
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 // JSONWebRequest will GET a json object/array from a given URL
 func JSONWebRequest(url string) interface{} {
-Log.Noticef("requesting url via GET %s", url)
+logger.Log.Noticef("requesting url via GET %s", url)

 resp, err := http.Get(url)
 if err != nil {
-Log.Panicf("could not get url '%s': %s", url, err)
+logger.Log.Panicf("could not get url '%s': %s", url, err)
 }
 defer resp.Body.Close()

 body, err := ioutil.ReadAll(resp.Body)
 if err != nil {
-Log.Panicf("could not read body from url '%s': %s", url, err)
+logger.Log.Panicf("could not read body from url '%s': %s", url, err)
 }

-Log.Debugf("output from url '%s':\n%s", url, string(body))
+logger.Log.Debugf("output from url '%s':\n%s", url, string(body))

 if resp.StatusCode >= 400 {
-Log.Panicf("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
+logger.Log.Panicf("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
 }

 contentType := resp.Header.Get("Content-Type")
@@ -33,7 +35,7 @@ func JSONWebRequest(url string) interface{} {
 if strings.Contains(contentType, "json") {

 } else {
-Log.Panicf("is not json '%s' from url '%s'", contentType, url)
+logger.Log.Panicf("is not json '%s' from url '%s'", contentType, url)
 }

 jsonMap := make(map[string]interface{})
@@ -48,6 +50,6 @@ func JSONWebRequest(url string) interface{} {
 return jsonArrayMap
 }

-Log.Panicf("could not read json from '%s': invalid type", url)
+logger.Log.Panicf("could not read json from '%s': invalid type", url)
 return nil
 }
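As a usage sketch (not part of the commit): JSONWebRequest returns the decoded JSON as interface{}, so callers type-assert the result, as the collection handling further down does. The URL below is a placeholder.

package main

import (
    "gitbase.de/apairon/mark2web/pkg/helper"
    "gitbase.de/apairon/mark2web/pkg/logger"
)

func main() {
    // placeholder URL; JSONWebRequest panics via logger.Log on HTTP or decode errors
    colData := helper.JSONWebRequest("https://example.com/api/entries.json")

    // the result is either a JSON object (map) or a JSON array, so callers type-assert it
    if entries, ok := colData.([]interface{}); ok {
        logger.Log.Noticef("got %d entries", len(entries))
    }
}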
@@ -1,4 +1,4 @@
-package helper
+package logger

 import (
 "os"
@@ -9,15 +9,11 @@ import (

 // Log is global logger
 var Log = logging.MustGetLogger("myLogger")
+var logBackendLeveled logging.LeveledBackend

-// ConfigureLogger sets logger backend and level
-func ConfigureLogger(level string) {
-logBackend := logging.NewLogBackend(os.Stderr, "", 0)
-logBackendFormatter := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(
-`%{color}%{time:15:04:05.000} %{shortfunc} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,
-))
-logBackendLeveled := logging.AddModuleLevel(logBackendFormatter)
-logBackendLevel := logging.INFO
+// SetLogLevel sets log level for global logger (debug, info, notice, warning, error)
+func SetLogLevel(level string) {
+logBackendLevel := logging.NOTICE
 switch level {
 case "debug":
 logBackendLevel = logging.DEBUG
@@ -40,7 +36,18 @@ func ConfigureLogger(level string) {
 break

 }

 logBackendLeveled.SetLevel(logBackendLevel, "")
 }

+// configureLogger sets logger backend and level
+func configureLogger() {
+logBackend := logging.NewLogBackend(os.Stderr, "", 0)
+logBackendFormatter := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(
+`%{color}%{time:15:04:05.000} %{shortfunc} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,
+))
+logBackendLeveled = logging.AddModuleLevel(logBackendFormatter)
+logBackendLeveled.SetLevel(logging.NOTICE, "")
+logging.SetBackend(logBackendLeveled)
+}
@@ -49,4 +56,5 @@ func init() {
 spew.Config.DisableCapacities = true
 spew.Config.DisableMethods = true
 spew.Config.DisablePointerMethods = true
+configureLogger()
 }
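The effect of the split above, summarized as a small sketch: configureLogger() is wired into init(), so importing the package is enough to get the formatted stderr backend at the default notice level, and SetLogLevel only switches the level afterwards. Only identifiers shown in the hunks are assumed here.

package main

import "gitbase.de/apairon/mark2web/pkg/logger"

func main() {
    // backend and formatter are already set up by the package's init() via configureLogger()
    logger.Log.Noticef("visible at the default notice level")
    logger.Log.Debugf("suppressed until the level is lowered")

    // SetLogLevel replaces the former helper.ConfigureLogger(level)
    logger.SetLogLevel("debug")
    logger.Log.Debugf("now visible")
}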
@@ -6,7 +6,7 @@ import (
 "regexp"
 "strings"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 cpy "github.com/otiai10/copy"
 )

@@ -22,10 +22,10 @@ func ProcessAssets() {
 if !strings.HasPrefix(to, "/") {
 to = Config.Directories.Output + "/" + to
 }
-helper.Log.Noticef("copying assets from '%s' to '%s'", from, to)
+logger.Log.Noticef("copying assets from '%s' to '%s'", from, to)
 err := cpy.Copy(from, to)
 if err != nil {
-helper.Log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
+logger.Log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
 }

 if Config.Assets.Compress {
@@ -37,7 +37,7 @@ func ProcessAssets() {
 // fixAssetsPath replaces assets path based on current path
 func (node *TreeNode) fixAssetsPath(str string) string {
 if find := Config.Assets.FixTemplate.Find; find != "" {
-helper.Log.Debugf("fixing assets paths for path '%s'", node.CurrentNavPath())
+logger.Log.Debugf("fixing assets paths for path '%s'", node.CurrentNavPath())
 repl := Config.Assets.FixTemplate.Replace
 toPath := Config.Assets.ToPath

@@ -48,7 +48,7 @@ func (node *TreeNode) fixAssetsPath(str string) string {
 }
 repl = bToRoot + toPath + "/" + repl
 repl = path.Clean(repl) + "/"
-helper.Log.Debugf("new assets paths: %s", repl)
+logger.Log.Debugf("new assets paths: %s", repl)
 return regex.ReplaceAllString(str, repl)
 }
@@ -6,7 +6,7 @@ import (
 "io"
 "os"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "github.com/itchio/go-brotli/enc"
 )

@@ -15,11 +15,11 @@ var brotliSupported = true
 func handleBrotliCompression(filename string, content []byte) {
 brFilename := filename + ".br"

-helper.Log.Infof("writing to compressed output file: %s", brFilename)
+logger.Log.Infof("writing to compressed output file: %s", brFilename)

 f, err := os.Create(brFilename)
 if err != nil {
-helper.Log.Panicf("could not create file '%s': %s", brFilename, err)
+logger.Log.Panicf("could not create file '%s': %s", brFilename, err)
 }
 defer f.Close()

@@ -30,19 +30,19 @@ func handleBrotliCompression(filename string, content []byte) {
 // content given
 _, err = bw.Write(content)
 if err != nil {
-helper.Log.Panicf("could not write brotli content for '%s': %s", filename, err)
+logger.Log.Panicf("could not write brotli content for '%s': %s", filename, err)
 }
 } else {
 // read file
 r, err := os.Open(filename)
 if err != nil {
-helper.Log.Panicf("could not open file '%s': %s", filename, err)
+logger.Log.Panicf("could not open file '%s': %s", filename, err)
 }
 defer r.Close()

 _, err = io.Copy(bw, r)
 if err != nil {
-helper.Log.Panicf("could not write brotli file for '%s': %s", filename, err)
+logger.Log.Panicf("could not write brotli file for '%s': %s", filename, err)
 }
 }
 }
@@ -7,6 +7,7 @@ import (
 "strings"

 "gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "github.com/davecgh/go-spew/spew"
 "github.com/flosch/pongo2"
 )
@@ -23,11 +24,11 @@ func (node *TreeNode) handleCollections() {
 collections := append(node.Config.Collections, node.Config.This.Collections...)
 for _, colConfig := range collections {
 if colConfig.Name == nil || *colConfig.Name == "" {
-helper.Log.Panicf("missing Name in collection config in '%s'", node.InputPath)
+logger.Log.Panicf("missing Name in collection config in '%s'", node.InputPath)
 }
 if (colConfig.URL == nil || *colConfig.URL == "") &&
 (colConfig.Directory == nil) {
-helper.Log.Panicf("missing URL and Directory in collection config in '%s'", node.InputPath)
+logger.Log.Panicf("missing URL and Directory in collection config in '%s'", node.InputPath)
 }

 if node.ColMap == nil {
@@ -45,7 +46,7 @@ func (node *TreeNode) handleCollections() {
 if colConfig.URL != nil {
 url, err := pongo2.RenderTemplateString(*colConfig.URL, ctx)
 if err != nil {
-helper.Log.Panicf("invalid template string for Collection Element.URL in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for Collection Element.URL in '%s': %s", node.InputPath, err)
 }

 errSrcText = "URL " + url
@@ -55,7 +56,7 @@ func (node *TreeNode) handleCollections() {
 colData = cacheEntry.data
 cacheEntry.hit++
 } else {
-helper.Log.Noticef("reading collection from: %s", errSrcText)
+logger.Log.Noticef("reading collection from: %s", errSrcText)
 colData = helper.JSONWebRequest(url)
 colCache[url] = &colCacheEntry{
 data: colData,
@@ -67,10 +68,10 @@ func (node *TreeNode) handleCollections() {
 path := node.ResolveInputPath(colConfig.Directory.Path)
 errSrcText = "DIR " + path

-helper.Log.Noticef("reading collection from: %s", errSrcText)
+logger.Log.Noticef("reading collection from: %s", errSrcText)
 d, err := ioutil.ReadDir(path)
 if err != nil {
-helper.Log.Panicf("could not read directory '%s': %s", path, err)
+logger.Log.Panicf("could not read directory '%s': %s", path, err)
 }

 mStr := "."
@@ -79,7 +80,7 @@ func (node *TreeNode) handleCollections() {
 }
 matcher, err := regexp.Compile(mStr)
 if err != nil {
-helper.Log.Panicf("could not compile regex for MatchFilename '%s' in '%s': %s", mStr, path, err)
+logger.Log.Panicf("could not compile regex for MatchFilename '%s' in '%s': %s", mStr, path, err)
 }

 if colConfig.Directory.ReverseOrder {
@@ -95,7 +96,7 @@ func (node *TreeNode) handleCollections() {
 inFile := path + "/" + fh.Name()
 md, err := ioutil.ReadFile(inFile)
 if err != nil {
-helper.Log.Panicf("could not read file '%s': %s", inFile, err)
+logger.Log.Panicf("could not read file '%s': %s", inFile, err)
 }
 _, ctx := node.processMarkdownWithHeader(md, inFile)
 (*ctx)["FilenameMatch"] = helper.GetRegexpParams(matcher, fh.Name())
@@ -116,16 +117,16 @@ func (node *TreeNode) handleCollections() {
 if colDataMap, ok = colData.(map[string]interface{}); ok {
 entries, ok = colDataMap[navT.EntriesAttribute].([]interface{})
 if !ok {
-helper.Log.Debug(spew.Sdump(colDataMap))
-helper.Log.Panicf("invalid json data in [%s] from '%s' for entries", navT.EntriesAttribute, errSrcText)
+logger.Log.Debug(spew.Sdump(colDataMap))
+logger.Log.Panicf("invalid json data in [%s] from '%s' for entries", navT.EntriesAttribute, errSrcText)
 }
 }
 } else {
 entries, ok = colData.([]interface{})
 }
 if !ok {
-helper.Log.Debug(spew.Sdump(colData))
-helper.Log.Panicf("invalid json data from '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", errSrcText)
+logger.Log.Debug(spew.Sdump(colData))
+logger.Log.Panicf("invalid json data from '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", errSrcText)
 }

 // build navigation with detail sites
@@ -133,23 +134,23 @@ func (node *TreeNode) handleCollections() {
 ctxE := make(pongo2.Context)
 err := helper.Merge(&ctxE, ctx)
 if err != nil {
-helper.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
 }
 var jsonCtx map[string]interface{}
 if jsonCtx, ok = colEl.(map[string]interface{}); !ok {
-helper.Log.Debug(spew.Sdump(colEl))
-helper.Log.Panicf("no json object for entry index %d from '%s'", idx, errSrcText)
+logger.Log.Debug(spew.Sdump(colEl))
+logger.Log.Panicf("no json object for entry index %d from '%s'", idx, errSrcText)
 }
 err = helper.Merge(&ctxE, pongo2.Context(jsonCtx))
 if err != nil {
-helper.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
 }

 tpl := ""
 if navT.Template != "" {
 tpl, err = pongo2.RenderTemplateString(navT.Template, ctxE)
 if err != nil {
-helper.Log.Panicf("invalid template string for NavTemplate.Template in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for NavTemplate.Template in '%s': %s", node.InputPath, err)
 }
 }
 if tpl == "" {
@@ -160,39 +161,39 @@ func (node *TreeNode) handleCollections() {
 if navT.DataKey != "" {
 dataKey, err = pongo2.RenderTemplateString(navT.DataKey, ctxE)
 if err != nil {
-helper.Log.Panicf("invalid template string for NavTemplate.DataKey in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for NavTemplate.DataKey in '%s': %s", node.InputPath, err)
 }
 }

 goTo, err := pongo2.RenderTemplateString(navT.GoTo, ctxE)
 if err != nil {
-helper.Log.Panicf("invalid template string for NavTemplate.GoTo in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for NavTemplate.GoTo in '%s': %s", node.InputPath, err)
 }
 goTo = strings.Trim(goTo, "/")
 goTo = path.Clean(goTo)

 if strings.Contains(goTo, "..") {
-helper.Log.Panicf("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", node.InputPath, goTo)
+logger.Log.Panicf("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", node.InputPath, goTo)
 }
 if goTo == "." {
-helper.Log.Panicf("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", node.InputPath)
+logger.Log.Panicf("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", node.InputPath)
 }
 if goTo == "" {
-helper.Log.Panicf("missing NavTemplate.GoTo in collection config in '%s'", node.InputPath)
+logger.Log.Panicf("missing NavTemplate.GoTo in collection config in '%s'", node.InputPath)
 }

 navname := ""
 if navT.Navname != "" {
 navname, err = pongo2.RenderTemplateString(navT.Navname, ctxE)
 if err != nil {
-helper.Log.Panicf("invalid template string for NavTemplate.Navname in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for NavTemplate.Navname in '%s': %s", node.InputPath, err)
 }
 }
 body := ""
 if navT.Body != "" {
 body, err = pongo2.RenderTemplateString(navT.Body, ctxE)
 if err != nil {
-helper.Log.Panicf("invalid template string for NavTemplate.Body in '%s': %s", node.InputPath, err)
+logger.Log.Panicf("invalid template string for NavTemplate.Body in '%s': %s", node.InputPath, err)
 }
 }

@@ -200,7 +201,7 @@ func (node *TreeNode) handleCollections() {
 l > 0 &&
 navname == colCache[cacheKey].navnames[l-1] {
 // navname before used same url, so recursion loop
-helper.Log.Panicf("collection request loop detected for in '%s' for : %s", node.InputPath, errSrcText)
+logger.Log.Panicf("collection request loop detected for in '%s' for : %s", node.InputPath, errSrcText)
 }

 colCache[cacheKey].navnames = append(colCache[cacheKey].navnames, navname)
@@ -7,7 +7,7 @@ import (
 "os"
 "path"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 func handleCompression(filename string, content []byte) {
@@ -21,17 +21,17 @@ func handleCompression(filename string, content []byte) {
 if Config.Compress.GZIP {
 gzFilename := filename + ".gz"

-helper.Log.Infof("writing to compressed output file: %s", gzFilename)
+logger.Log.Infof("writing to compressed output file: %s", gzFilename)

 f, err := os.Create(gzFilename)
 if err != nil {
-helper.Log.Panicf("could not create file '%s': %s", gzFilename, err)
+logger.Log.Panicf("could not create file '%s': %s", gzFilename, err)
 }
 defer f.Close()

 zw, err := gzip.NewWriterLevel(f, gzip.BestCompression)
 if err != nil {
-helper.Log.Panicf("could not initialize gzip writer for '%s': %s", filename, err)
+logger.Log.Panicf("could not initialize gzip writer for '%s': %s", filename, err)
 }
 defer zw.Close()

@@ -39,19 +39,19 @@ func handleCompression(filename string, content []byte) {
 // content given
 _, err = zw.Write(content)
 if err != nil {
-helper.Log.Panicf("could not write gziped content for '%s': %s", filename, err)
+logger.Log.Panicf("could not write gziped content for '%s': %s", filename, err)
 }
 } else {
 // read file
 r, err := os.Open(filename)
 if err != nil {
-helper.Log.Panicf("could not open file '%s': %s", filename, err)
+logger.Log.Panicf("could not open file '%s': %s", filename, err)
 }
 defer r.Close()

 _, err = io.Copy(zw, r)
 if err != nil {
-helper.Log.Panicf("could not gzip file '%s': %s", filename, err)
+logger.Log.Panicf("could not gzip file '%s': %s", filename, err)
 }
 }

@@ -61,13 +61,13 @@ func handleCompression(filename string, content []byte) {
 }

 func compressFilesInDir(dir string) {
-helper.Log.Noticef("compressing configured files in: %s", dir)
+logger.Log.Noticef("compressing configured files in: %s", dir)

 var _processDir func(string)
 _processDir = func(d string) {
 entries, err := ioutil.ReadDir(d)
 if err != nil {
-helper.Log.Panicf("could not read dir '%s': %s", d, err)
+logger.Log.Panicf("could not read dir '%s': %s", d, err)
 }

 for _, entry := range entries {
@@ -8,6 +8,7 @@ import (
 "strings"

 "gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "github.com/davecgh/go-spew/spew"
 "github.com/flosch/pongo2"
 cpy "github.com/otiai10/copy"
@@ -24,7 +25,7 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,

 files, err := ioutil.ReadDir(node.InputPath)
 if err != nil {
-helper.Log.Panic(err)
+logger.Log.Panic(err)
 }

 // first only files
@@ -33,14 +34,14 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,
 if !f.IsDir() && f.Name() != "config.yml" {
 switch path.Ext(f.Name()) {
 case ".md":
-helper.Log.Debugf(".MD %s", p)
+logger.Log.Debugf(".MD %s", p)
 if node.InputFiles == nil {
 node.InputFiles = make([]string, 0)
 }
 node.InputFiles = append(node.InputFiles, f.Name())
 break
 default:
-helper.Log.Debugf("FIL %s", p)
+logger.Log.Debugf("FIL %s", p)
 if node.OtherFiles == nil {
 node.OtherFiles = make([]string, 0)
 }
@@ -53,7 +54,7 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,
 for _, f := range files {
 p := node.InputPath + "/" + f.Name()
 if f.IsDir() {
-helper.Log.Debugf("DIR %s", p)
+logger.Log.Debugf("DIR %s", p)
 newTree := new(TreeNode)
 newTree.root = node.root
 if node.Sub == nil {
@@ -75,18 +76,18 @@ func (node *TreeNode) processMarkdownWithHeader(md []byte, errorRef string) (*Pa
 // replace tabs
 yamlData = bytes.Replace(yamlData, []byte("\t"), []byte(" "), -1)

-helper.Log.Debugf("found yaml header in '%s', merging config", errorRef)
+logger.Log.Debugf("found yaml header in '%s', merging config", errorRef)
 err := yaml.Unmarshal(yamlData, newConfig)
 if err != nil {
-helper.Log.Panicf("could not parse YAML header from '%s': %s", errorRef, err)
+logger.Log.Panicf("could not parse YAML header from '%s': %s", errorRef, err)
 }

-helper.Log.Debug("merging config with upper config")
+logger.Log.Debug("merging config with upper config")
 oldThis := newConfig.This
 helper.Merge(newConfig, node.Config)
 newConfig.This = oldThis

-helper.Log.Debug(spew.Sdump(newConfig))
+logger.Log.Debug(spew.Sdump(newConfig))

 md = headerRegex.ReplaceAll(md, []byte(""))
 } else {
@@ -184,26 +185,26 @@ func (node *TreeNode) ProcessContent() {
 if ignoreRegex != nil && *ignoreRegex != "" {
 regex, err := regexp.Compile(*ignoreRegex)
 if err != nil {
-helper.Log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
+logger.Log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
 }
 ignoreFile = regex.MatchString(file)
 }
 }

 if ignoreFile {
-helper.Log.Infof("ignoring file '%s', because of filename.ignore", inFile)
+logger.Log.Infof("ignoring file '%s', because of filename.ignore", inFile)
 } else {
 var input []byte

 if file != "" {
-helper.Log.Debugf("reading file: %s", inFile)
+logger.Log.Debugf("reading file: %s", inFile)

 var err error
 input, err = ioutil.ReadFile(inFile)
 if err != nil {
-helper.Log.Panicf("could not read '%s':%s", inFile, err)
+logger.Log.Panicf("could not read '%s':%s", inFile, err)
 }
-helper.Log.Infof("processing input file '%s'", inFile)
+logger.Log.Infof("processing input file '%s'", inFile)
 } else {
 // use input string if available and input filename == ""
 var inputString *string
@@ -211,7 +212,7 @@ func (node *TreeNode) ProcessContent() {
 inputString = i.InputString
 }
 if inputString != nil {
-helper.Log.Debugf("using input string instead of file")
+logger.Log.Debugf("using input string instead of file")
 input = []byte(*inputString)
 }
 }
@@ -244,7 +245,7 @@ func (node *TreeNode) ProcessContent() {
 if stripRegex != nil && *stripRegex != "" {
 regex, err := regexp.Compile(*stripRegex)
 if err != nil {
-helper.Log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
+logger.Log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
 }
 outputFilename = regex.ReplaceAllString(outputFilename, "$1")
 }
@@ -254,20 +255,20 @@ func (node *TreeNode) ProcessContent() {
 }

 outFile := node.OutputPath + "/" + outputFilename
-helper.Log.Debugf("using '%s' as output file", outFile)
-helper.Log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
+logger.Log.Debugf("using '%s' as output file", outFile)
+logger.Log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
 templateFilename := *newConfig.Template
 result, err := renderTemplate(*newConfig.Template, node, newConfig, ctx)
 if err != nil {
-helper.Log.Panicf("could not execute template '%s' for input file '%s': %s", templateFilename, inFile, err)
+logger.Log.Panicf("could not execute template '%s' for input file '%s': %s", templateFilename, inFile, err)
 }

 result = node.fixAssetsPath(result)

-helper.Log.Noticef("writing to output file: %s", outFile)
+logger.Log.Noticef("writing to output file: %s", outFile)
 err = ioutil.WriteFile(outFile, []byte(result), 0644)
 if err != nil {
-helper.Log.Panicf("could not write to output file '%s': %s", outFile, err)
+logger.Log.Panicf("could not write to output file '%s': %s", outFile, err)
 }

 handleCompression(outFile, []byte(result))
@@ -282,10 +283,10 @@ func (node *TreeNode) ProcessContent() {
 case "copy":
 from := node.InputPath + "/" + file
 to := node.OutputPath + "/" + file
-helper.Log.Noticef("copying file from '%s' to '%s'", from, to)
+logger.Log.Noticef("copying file from '%s' to '%s'", from, to)
 err := cpy.Copy(from, to)
 if err != nil {
-helper.Log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
+logger.Log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
 }

 handleCompression(to, nil)
@@ -9,6 +9,7 @@ import (
 "time"

 "gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 "github.com/davecgh/go-spew/spew"
 "github.com/extemporalgenome/slug"
 "github.com/flosch/pongo2"
@@ -41,36 +42,36 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
 inPath += "/" + subDir
 }

-helper.Log.Infof("reading input directory: %s", inPath)
+logger.Log.Infof("reading input directory: %s", inPath)

 node.InputPath = inPath

 // read config
 newConfig := new(PathConfig)
-helper.Log.Debug("looking for config.yml ...")
+logger.Log.Debug("looking for config.yml ...")
 configFile := inPath + "/config.yml"
 if _, err := os.Stat(configFile); os.IsNotExist(err) {
-helper.Log.Debug("no config.yml found in this directory, using upper configs")
+logger.Log.Debug("no config.yml found in this directory, using upper configs")
 helper.Merge(newConfig, conf)
 // remove this
 newConfig.This = ThisPathConfig{}
 } else {
-helper.Log.Debug("reading config...")
+logger.Log.Debug("reading config...")
 data, err := ioutil.ReadFile(configFile)
 if err != nil {
-helper.Log.Panicf("could not read file '%s': %s", configFile, err)
+logger.Log.Panicf("could not read file '%s': %s", configFile, err)
 }
 err = yaml.Unmarshal(data, newConfig)
 if err != nil {
-helper.Log.Panicf("could not parse YAML file '%s': %s", configFile, err)
+logger.Log.Panicf("could not parse YAML file '%s': %s", configFile, err)
 }

-helper.Log.Debug("merging config with upper config")
+logger.Log.Debug("merging config with upper config")
 oldThis := newConfig.This
 helper.Merge(newConfig, conf)
 newConfig.This = oldThis

-helper.Log.Debug(spew.Sdump(newConfig))
+logger.Log.Debug(spew.Sdump(newConfig))
 }

 node.Config = newConfig
@@ -83,7 +84,7 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
 }
 if regexStr != nil && *regexStr != "" {
 if regex, err := regexp.Compile(*regexStr); err != nil {
-helper.Log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+subDir, err)
+logger.Log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+subDir, err)
 } else {
 stripedDir = regex.ReplaceAllString(stripedDir, "$1")
 }
@@ -98,7 +99,7 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
 outPath := outBase + "/" + stripedDir
 outPath = path.Clean(outPath)

-helper.Log.Infof("calculated output directory: %s", outPath)
+logger.Log.Infof("calculated output directory: %s", outPath)
 node.OutputPath = outPath

 // handle collections
@@ -129,7 +130,7 @@ func (node *TreeNode) addSubNode(tplFilename, subDir string, navname string, ctx
 mergedConfig := new(PathConfig)
 err := helper.Merge(mergedConfig, node.Config)
 if err != nil {
-helper.Log.Panicf("merge of path config failed: %s", err)
+logger.Log.Panicf("merge of path config failed: %s", err)
 }
 // dont merge Data[DataKey]
 if dataMapKey != "" {
@@ -139,7 +140,7 @@ func (node *TreeNode) addSubNode(tplFilename, subDir string, navname string, ctx
 }
 err = helper.Merge(mergedConfig, newPathConfig)
 if err != nil {
-helper.Log.Panicf("merge of path config failed: %s", err)
+logger.Log.Panicf("merge of path config failed: %s", err)
 }

 newNode.fillConfig(
@@ -5,19 +5,19 @@ import (
 "regexp"
 "strings"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 func htaccessRedirect(outDir, goTo string) {
 switch Config.Webserver.Type {
 case "apache":
 htaccessFile := outDir + "/.htaccess"
-helper.Log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goTo)
+logger.Log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goTo)
 err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
 RewriteRule ^$ %{REQUEST_URI}`+goTo+`/ [R,L]
 `), 0644)
 if err != nil {
-helper.Log.Panicf("could not write '%s': %s", htaccessFile, err)
+logger.Log.Panicf("could not write '%s': %s", htaccessFile, err)
 }
 }
 }
@@ -90,10 +90,10 @@ RemoveLanguage .br

 if configStr != "" {
 htaccessFile := Config.Directories.Output + "/.htaccess"
-helper.Log.Noticef("writing webserver config to: %s", htaccessFile)
+logger.Log.Noticef("writing webserver config to: %s", htaccessFile)
 err := ioutil.WriteFile(htaccessFile, []byte(configStr), 0644)
 if err != nil {
-helper.Log.Panicf("could not write '%s': %s", htaccessFile, err)
+logger.Log.Panicf("could not write '%s': %s", htaccessFile, err)
 }
 }
 }
@@ -6,6 +6,7 @@ import (
 "strings"

 "gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 // NavElement is one element with ist attributes and subs
@@ -42,10 +43,10 @@ func buildNavigationRecursive(tree *TreeNode, curNavMap *map[string]*NavElement,
 if ignNav != nil && *ignNav != "" {
 regex, err := regexp.Compile(*ignNav)
 if err != nil {
-helper.Log.Panicf("could not compile IngoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
+logger.Log.Panicf("could not compile IngoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
 }
 if regex.MatchString(path.Base(el.InputPath)) {
-helper.Log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
+logger.Log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
 continue
 }
 }
@@ -4,7 +4,7 @@ import (
 "runtime"
 "sync"

-"gitbase.de/apairon/mark2web/pkg/helper"
+"gitbase.de/apairon/mark2web/pkg/logger"
 )

 var wg sync.WaitGroup
@@ -12,7 +12,7 @@ var numCPU = runtime.NumCPU()
 var curNumThreads = 1 // main thread is 1

 func init() {
-helper.Log.Infof("number of CPU core: %d", numCPU)
+logger.Log.Infof("number of CPU core: %d", numCPU)
 }

 // Wait will wait for all our internal go threads
@@ -49,7 +49,7 @@ func ThreadStart(f func(), forceNewThread ...bool) {
 ThreadSetup()
 go threadF()
 } else {
-helper.Log.Debugf("no more CPU core (%d used), staying in main thread", curNumThreads)
+logger.Log.Debugf("no more CPU core (%d used), staying in main thread", curNumThreads)
 f()
 }
 }
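A hedged sketch of the worker pattern this file provides, as used by the image filter further up: ThreadStart and its logging come from the diff, while mark2web.Wait() is assumed from the "Wait will wait for all our internal go threads" comment and may differ in the actual API.

package main

import (
    "gitbase.de/apairon/mark2web/pkg/logger"
    "gitbase.de/apairon/mark2web/pkg/mark2web"
)

func main() {
    logger.SetLogLevel("debug")

    for i := 0; i < 4; i++ {
        n := i
        // runs in a new goroutine while CPU cores are free, otherwise inline in the main thread
        mark2web.ThreadStart(func() {
            logger.Log.Noticef("job %d done", n)
        })
    }

    // assumed helper: blocks until all started jobs have finished
    mark2web.Wait()
}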