improved logging
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
Sebastian Frank 2019-03-29 15:49:25 +01:00
parent 7695f42e20
commit daed37587e
Signed by: apairon
GPG Key ID: 7270D06DDA7FE8C3
15 changed files with 142 additions and 207 deletions

View File

@ -26,7 +26,7 @@ func main() {
outDir := flag.String("out", "html", "output directory")
createOutDir := flag.Bool("create", false, "create output directory if not existing")
//clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
logLevel := flag.String("logLevel", "notice", "log level: debug, info, notice, warning, error")
logLevel := flag.String("logLevel", "", "log level: debug, info, notice, warning, error")
progressBars := flag.Bool("progress", false, "show progress bars for jobs")
version := flag.Bool("version", false, "print version of this executable")
@ -40,26 +40,26 @@ func main() {
}
level := "notice"
if logLevel != nil {
if progressBars != nil && *progressBars {
progress.Start()
level = "warning" // disable log for progressbars
}
if logLevel != nil && *logLevel != "" {
level = *logLevel
}
logger.SetLogLevel(level)
if progressBars != nil && *progressBars {
progress.Start()
}
if inDir == nil || *inDir == "" {
logger.E("input directory not specified")
os.Exit(1)
logger.Exit("input directory not specified")
}
iDir := path.Clean(*inDir)
inDir = &iDir
logger.I("input directory: %s", *inDir)
if outDir == nil || *outDir == "" {
logger.E("output directory not specified")
os.Exit(1)
logger.Exit("output directory not specified")
}
oDir := path.Clean(*outDir)
outDir = &oDir
@ -85,8 +85,7 @@ func main() {
if fD == nil {
logger.P("something went wrong, could not get file handle for output dir %s", *outDir)
} else if !fD.IsDir() {
logger.E("output directory '%s' is not a directory", *outDir)
os.Exit(1)
logger.Exit("output directory '%s' is not a directory", *outDir)
}
}
@ -139,4 +138,6 @@ func main() {
mark2web.Run(*inDir, *outDir, defaultPathConfig)
logger.N("done")
}

View File

@ -2,7 +2,6 @@ package filter
import (
"io/ioutil"
"os"
"path"
"strings"
@ -34,8 +33,7 @@ func RegisterFilters(dir string) {
fn, err := vm.Run(jsFile)
logger.Eexit(err, "error in javascript vm for '%s'", jsFile)
if !fn.IsFunction() {
logger.E("%s does not contain a function code", jsFile)
os.Exit(1)
logger.Exit("%s does not contain a function code", jsFile)
}
err = pongo2.RegisterFilter(

View File

@ -131,7 +131,7 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
pt := path.Dir(imgTarget)
if _, err := os.Stat(pt); os.IsNotExist(err) {
logger.Log.Infof("create image target dir: %s", pt)
logger.I("create image target dir: %s", pt)
if err := os.MkdirAll(pt, 0755); err != nil {
return nil, &pongo2.Error{
Sender: "filter:image_resize",
@ -147,11 +147,11 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
}
if f, err := os.Stat(imgTarget); err == nil && !f.IsDir() {
logger.Log.Noticef("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
logger.N("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
} else {
jobm.Enqueue(jobm.Job{
Function: func() {
logger.Log.Noticef("processing image from %s to %s", imgSource, imgTarget)
logger.N("processing image from %s to %s", imgSource, imgTarget)
if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
// remote file
img, p.Format, err = webrequest.GetImage(imgSource)
@ -189,11 +189,11 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
case "bottomright":
anchor = imaging.BottomRight
default:
logger.Log.Panicf("filter:image_resize, unknown anchor a=%s definition", p.Anchor)
logger.Exit("filter:image_resize, unknown anchor a=%s definition", p.Anchor)
}
img = imaging.Fill(img, p.Width, p.Height, anchor, imaging.Lanczos)
default:
logger.Log.Panicf("filter:image_resize, invalid p parameter '%s'", p.Process)
logger.Exit("filter:image_resize, invalid p parameter '%s'", p.Process)
}
if p.Format == "" {
@ -207,14 +207,13 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
err = imaging.Save(img, imgTarget, encodeOptions...)
logger.Eerr(err, "filter:image_resize, could save image '%s'", imgTarget)
default:
logger.E("filter:image_resize, invalid filename extension for image: %s", imgTarget)
os.Exit(1)
logger.Exit("filter:image_resize, invalid filename extension for image: %s", imgTarget)
}
} else {
out, err := os.Create(imgTarget)
defer out.Close()
logger.Eerr(err, "filter:image_resize, could not create image file '%s'", imgTarget)
logger.Eexit(err, "filter:image_resize, could not create image file '%s'", imgTarget)
switch p.Format {
case "jpeg", "jpg":
var jpegOpt *jpeg.Options
@ -229,12 +228,12 @@ func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *
case "gif":
err = gif.Encode(out, img, nil)
default:
logger.E("filter:image_resize, unknown format '%s' for '%s'", p.Format, imgSource)
logger.Exit("filter:image_resize, unknown format '%s' for '%s'", p.Format, imgSource)
}
logger.Eerr(err, "filter:image_resize, could not encode image file '%s'", imgTarget)
logger.Eexit(err, "filter:image_resize, could not encode image file '%s'", imgTarget)
}
logger.Log.Noticef("finished image: %s", imgTarget)
logger.N("finished image: %s", imgTarget)
},
Description: imgSource,
Category: "image process",

View File

@ -8,21 +8,20 @@ import (
// CreateDirectory creates the directory with all missing parents and aborts on error
func CreateDirectory(dir string) {
logger.Log.Debugf("trying to create output directory: %s", dir)
logger.D("trying to create output directory: %s", dir)
if dirH, err := os.Stat(dir); os.IsNotExist(err) {
err := os.MkdirAll(dir, 0755)
if err != nil {
logger.Log.Panicf("could not create output directory '%s': %s", dir, err)
}
logger.Log.Noticef("created output directory: %s", dir)
logger.Eexit(err, "could not create output directory '%s'", dir)
logger.I("created output directory: %s", dir)
} else if dirH != nil {
if dirH.IsDir() {
logger.Log.Noticef("output directory '%s' already exists", dir)
logger.I("output directory '%s' already exists", dir)
} else {
logger.Log.Panicf("output directory '%s' is no directory", dir)
logger.Exit("output directory '%s' is no directory", dir)
}
} else {
logger.Log.Panicf("unknown error for output directory '%s': %s", dir, err)
logger.Perr(err, "unknown error for output directory '%s'", dir)
}
}

View File

@ -32,7 +32,7 @@ func worker(jobChan <-chan []Job) {
}
func init() {
//logger.Log.Infof("number of CPU core: %d", numCPU)
//logger.I("number of CPU core: %d", numCPU)
// one core for main thread
for i := 0; i < numCPU; i++ {
wg.Add(1)

View File

@ -118,6 +118,14 @@ func Eexit(err error, format string, args ...interface{}) {
}
}
// Exit is shorthand for
//	E(format, args...)   // i.e. Log.Errorf(...)
//	os.Exit(1)
// It logs the formatted message at error level and then terminates the
// process with exit status 1. Note that os.Exit does not run deferred
// functions, so any pending defers in the caller are skipped.
func Exit(format string, args ...interface{}) {
E(format, args...)
os.Exit(1)
}
// Perr is shorthand for
// if err != nil {
// Log.Panicf(...)

View File

@ -1,7 +1,6 @@
package mark2web
import (
"log"
"path"
"regexp"
"strings"
@ -22,11 +21,10 @@ func ProcessAssets() {
if !strings.HasPrefix(to, "/") {
to = Config.Directories.Output + "/" + to
}
logger.Log.Noticef("copying assets from '%s' to '%s'", from, to)
logger.N("copying assets from '%s' to '%s'", from, to)
err := cpy.Copy(from, to)
if err != nil {
logger.Log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
}
logger.Perr(err, "could not copy assets from '%s' to '%s'", from, to)
if Config.Assets.Compress {
compressFilesInDir(to)
@ -37,18 +35,17 @@ func ProcessAssets() {
// fixAssetsPath replaces assets path based on current path
func (node *TreeNode) fixAssetsPath(str string) string {
if find := Config.Assets.FixTemplate.Find; find != "" {
logger.Log.Debugf("fixing assets paths for path '%s'", node.CurrentNavPath())
logger.D("fixing assets paths for path '%s'", node.CurrentNavPath())
repl := Config.Assets.FixTemplate.Replace
toPath := Config.Assets.ToPath
bToRoot := node.BackToRootPath()
regex, err := regexp.Compile(find)
if err != nil {
log.Panicf("could not compile regexp '%s' for assets path: %s", find, err)
}
logger.Eexit(err, "could not compile regexp '%s' for assets path", find)
repl = bToRoot + toPath + "/" + repl
repl = path.Clean(repl) + "/"
logger.Log.Debugf("new assets paths: %s", repl)
logger.D("new assets paths: %s", repl)
return regex.ReplaceAllString(str, repl)
}

View File

@ -15,12 +15,11 @@ var brotliSupported = true
func handleBrotliCompression(filename string, content []byte) {
brFilename := filename + ".br"
logger.Log.Infof("writing to compressed output file: %s", brFilename)
logger.I("writing to compressed output file: %s", brFilename)
f, err := os.Create(brFilename)
if err != nil {
logger.Log.Panicf("could not create file '%s': %s", brFilename, err)
}
logger.Eexit(err, "could not create file '%s'", brFilename)
defer f.Close()
bw := enc.NewBrotliWriter(f, nil)
@ -29,20 +28,14 @@ func handleBrotliCompression(filename string, content []byte) {
if content != nil {
// content given
_, err = bw.Write(content)
if err != nil {
logger.Log.Panicf("could not write brotli content for '%s': %s", filename, err)
}
logger.Eexit(err, "could not write brotli content for '%s'", filename)
} else {
// read file
r, err := os.Open(filename)
if err != nil {
logger.Log.Panicf("could not open file '%s': %s", filename, err)
}
logger.Eexit(err, "could not open file '%s'", filename)
defer r.Close()
_, err = io.Copy(bw, r)
if err != nil {
logger.Log.Panicf("could not write brotli file for '%s': %s", filename, err)
}
logger.Eexit(err, "could not write brotli file for '%s'", filename)
}
}

View File

@ -25,11 +25,11 @@ func (node *TreeNode) handleCollections() {
collections := append(node.Config.Collections, node.Config.This.Collections...)
for _, colConfig := range collections {
if colConfig.Name == nil || *colConfig.Name == "" {
logger.Log.Panicf("missing Name in collection config in '%s'", node.InputPath)
logger.Exit("missing Name in collection config in '%s'", node.InputPath)
}
if (colConfig.URL == nil || *colConfig.URL == "") &&
(colConfig.Directory == nil) {
logger.Log.Panicf("missing URL and Directory in collection config in '%s'", node.InputPath)
logger.Exit("missing URL and Directory in collection config in '%s'", node.InputPath)
}
if node.ColMap == nil {
@ -46,9 +46,7 @@ func (node *TreeNode) handleCollections() {
if colConfig.URL != nil {
url, err := pongo2.RenderTemplateString(*colConfig.URL, ctx)
if err != nil {
logger.Log.Panicf("invalid template string for Collection Element.URL in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for Collection Element.URL in '%s'", node.InputPath)
errSrcText = "URL " + url
cacheKey = url
@ -57,7 +55,7 @@ func (node *TreeNode) handleCollections() {
colData = cacheEntry.data
cacheEntry.hit++
} else {
logger.Log.Noticef("reading collection from: %s", errSrcText)
logger.N("reading collection from: %s", errSrcText)
colData = webrequest.GetJSON(url)
colCache[url] = &colCacheEntry{
data: colData,
@ -69,20 +67,16 @@ func (node *TreeNode) handleCollections() {
path := node.ResolveInputPath(colConfig.Directory.Path)
errSrcText = "DIR " + path
logger.Log.Noticef("reading collection from: %s", errSrcText)
logger.N("reading collection from: %s", errSrcText)
d, err := ioutil.ReadDir(path)
if err != nil {
logger.Log.Panicf("could not read directory '%s': %s", path, err)
}
logger.Eexit(err, "could not read directory '%s'", path)
mStr := "."
if colConfig.Directory.MatchFilename != "" {
mStr = colConfig.Directory.MatchFilename
}
matcher, err := regexp.Compile(mStr)
if err != nil {
logger.Log.Panicf("could not compile regex for MatchFilename '%s' in '%s': %s", mStr, path, err)
}
logger.Eexit(err, "could not compile regex for MatchFilename '%s' in '%s'", mStr, path)
if colConfig.Directory.ReverseOrder {
for i := len(d)/2 - 1; i >= 0; i-- {
@ -96,9 +90,8 @@ func (node *TreeNode) handleCollections() {
if !fh.IsDir() && matcher.MatchString(fh.Name()) {
inFile := path + "/" + fh.Name()
md, err := ioutil.ReadFile(inFile)
if err != nil {
logger.Log.Panicf("could not read file '%s': %s", inFile, err)
}
logger.Eexit(err, "could not read file '%s'", inFile)
_, ctx := node.processMarkdownWithHeader(md, inFile)
(*ctx)["FilenameMatch"] = helper.GetRegexpParams(matcher, fh.Name())
fcolData = append(fcolData, *ctx)
@ -118,41 +111,36 @@ func (node *TreeNode) handleCollections() {
if colDataMap, ok = colData.(map[string]interface{}); ok {
entries, ok = colDataMap[navT.EntriesAttribute].([]interface{})
if !ok {
logger.Log.Debug(spew.Sdump(colDataMap))
logger.Log.Panicf("invalid json data in [%s] from '%s' for entries", navT.EntriesAttribute, errSrcText)
logger.D(spew.Sdump(colDataMap))
logger.Exit("invalid json data in [%s] from '%s' for entries", navT.EntriesAttribute, errSrcText)
}
}
} else {
entries, ok = colData.([]interface{})
}
if !ok {
logger.Log.Debug(spew.Sdump(colData))
logger.Log.Panicf("invalid json data from '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", errSrcText)
logger.D(spew.Sdump(colData))
logger.Exit("invalid json data from '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", errSrcText)
}
// build navigation with detail sites
for idx, colEl := range entries {
ctxE := make(pongo2.Context)
err := helper.Merge(&ctxE, ctx)
if err != nil {
logger.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "could not merge context in '%s'", node.InputPath)
var jsonCtx map[string]interface{}
if jsonCtx, ok = colEl.(map[string]interface{}); !ok {
logger.Log.Debug(spew.Sdump(colEl))
logger.Log.Panicf("no json object for entry index %d from '%s'", idx, errSrcText)
logger.D(spew.Sdump(colEl))
logger.Exit("no json object for entry index %d from '%s'", idx, errSrcText)
}
err = helper.Merge(&ctxE, pongo2.Context(jsonCtx))
if err != nil {
logger.Log.Panicf("could not merge context in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "could not merge context in '%s'", node.InputPath)
tpl := ""
if navT.Template != "" {
tpl, err = pongo2.RenderTemplateString(navT.Template, ctxE)
if err != nil {
logger.Log.Panicf("invalid template string for NavTemplate.Template in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for NavTemplate.Template in '%s'", node.InputPath)
}
if tpl == "" {
tpl = *node.Config.Template
@ -161,48 +149,41 @@ func (node *TreeNode) handleCollections() {
dataKey := ""
if navT.DataKey != "" {
dataKey, err = pongo2.RenderTemplateString(navT.DataKey, ctxE)
if err != nil {
logger.Log.Panicf("invalid template string for NavTemplate.DataKey in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for NavTemplate.DataKey in '%s'", node.InputPath)
}
goTo, err := pongo2.RenderTemplateString(navT.GoTo, ctxE)
if err != nil {
logger.Log.Panicf("invalid template string for NavTemplate.GoTo in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for NavTemplate.GoTo in '%s'", node.InputPath)
goTo = strings.Trim(goTo, "/")
goTo = path.Clean(goTo)
if strings.Contains(goTo, "..") {
logger.Log.Panicf("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", node.InputPath, goTo)
logger.Exit("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", node.InputPath, goTo)
}
if goTo == "." {
logger.Log.Panicf("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", node.InputPath)
logger.Exit("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", node.InputPath)
}
if goTo == "" {
logger.Log.Panicf("missing NavTemplate.GoTo in collection config in '%s'", node.InputPath)
logger.Exit("missing NavTemplate.GoTo in collection config in '%s'", node.InputPath)
}
navname := ""
if navT.Navname != "" {
navname, err = pongo2.RenderTemplateString(navT.Navname, ctxE)
if err != nil {
logger.Log.Panicf("invalid template string for NavTemplate.Navname in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for NavTemplate.Navname in '%s'", node.InputPath)
}
body := ""
if navT.Body != "" {
body, err = pongo2.RenderTemplateString(navT.Body, ctxE)
if err != nil {
logger.Log.Panicf("invalid template string for NavTemplate.Body in '%s': %s", node.InputPath, err)
}
logger.Eexit(err, "invalid template string for NavTemplate.Body in '%s'", node.InputPath)
}
if l := len(colCache[cacheKey].navnames); colCache[cacheKey].hit > 1 &&
l > 0 &&
navname == colCache[cacheKey].navnames[l-1] {
// this navname was already produced for the same cached URL, so we are in a recursion loop
logger.Log.Panicf("collection request loop detected for in '%s' for : %s", node.InputPath, errSrcText)
logger.Exit("collection request loop detected for in '%s' for : %s", node.InputPath, errSrcText)
}
colCache[cacheKey].navnames = append(colCache[cacheKey].navnames, navname)

View File

@ -24,38 +24,28 @@ func handleCompression(filename string, content []byte) {
if Config.Compress.GZIP {
gzFilename := filename + ".gz"
logger.Log.Infof("writing to compressed output file: %s", gzFilename)
logger.I("writing to compressed output file: %s", gzFilename)
f, err := os.Create(gzFilename)
if err != nil {
logger.Log.Panicf("could not create file '%s': %s", gzFilename, err)
}
logger.Eexit(err, "could not create file '%s'", gzFilename)
defer f.Close()
zw, err := gzip.NewWriterLevel(f, gzip.BestCompression)
if err != nil {
logger.Log.Panicf("could not initialize gzip writer for '%s': %s", filename, err)
}
logger.Eexit(err, "could not initialize gzip writer for '%s'", filename)
defer zw.Close()
if content != nil {
// content given
_, err = zw.Write(content)
if err != nil {
logger.Log.Panicf("could not write gziped content for '%s': %s", filename, err)
}
logger.Eexit(err, "could not write gziped content for '%s'", filename)
} else {
// read file
r, err := os.Open(filename)
if err != nil {
logger.Log.Panicf("could not open file '%s': %s", filename, err)
}
logger.Eexit(err, "could not open file '%s'", filename)
defer r.Close()
_, err = io.Copy(zw, r)
if err != nil {
logger.Log.Panicf("could not gzip file '%s': %s", filename, err)
}
logger.Eexit(err, "could not gzip file '%s'", filename)
}
}
@ -67,14 +57,12 @@ func handleCompression(filename string, content []byte) {
}
func compressFilesInDir(dir string) {
logger.Log.Noticef("compressing configured files in: %s", dir)
logger.N("compressing configured files in: %s", dir)
var _processDir func(string)
_processDir = func(d string) {
entries, err := ioutil.ReadDir(d)
if err != nil {
logger.Log.Panicf("could not read dir '%s': %s", d, err)
}
logger.Eexit(err, "could not read dir '%s'", d)
for _, entry := range entries {
if entry.IsDir() {

View File

@ -29,9 +29,7 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,
node.fillConfig(inBase, outBase, dir, conf)
files, err := ioutil.ReadDir(node.InputPath)
if err != nil {
logger.Log.Panic(err)
}
logger.Eexit(err, "could not read dir '%s'", node.InputPath)
// first only files
for _, f := range files {
@ -39,14 +37,14 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,
if !f.IsDir() && f.Name() != "config.yml" {
switch path.Ext(f.Name()) {
case ".md":
logger.Log.Debugf(".MD %s", p)
logger.D(".MD %s", p)
if node.InputFiles == nil {
node.InputFiles = make([]string, 0)
}
node.InputFiles = append(node.InputFiles, f.Name())
break
default:
logger.Log.Debugf("FIL %s", p)
logger.D("FIL %s", p)
if node.OtherFiles == nil {
node.OtherFiles = make([]string, 0)
}
@ -59,7 +57,7 @@ func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string,
for _, f := range files {
p := node.InputPath + "/" + f.Name()
if f.IsDir() {
logger.Log.Debugf("DIR %s", p)
logger.D("DIR %s", p)
newTree := new(TreeNode)
newTree.root = node.root
if node.Sub == nil {
@ -81,18 +79,16 @@ func (node *TreeNode) processMarkdownWithHeader(md []byte, errorRef string) (*Pa
// replace tabs
yamlData = bytes.Replace(yamlData, []byte("\t"), []byte(" "), -1)
logger.Log.Debugf("found yaml header in '%s', merging config", errorRef)
logger.D("found yaml header in '%s', merging config", errorRef)
err := yaml.Unmarshal(yamlData, newConfig)
if err != nil {
logger.Log.Panicf("could not parse YAML header from '%s': %s", errorRef, err)
}
logger.Eexit(err, "could not parse YAML header from '%s'", errorRef)
logger.Log.Debug("merging config with upper config")
logger.D("merging config with upper config")
oldThis := newConfig.This
helper.Merge(newConfig, node.Config)
newConfig.This = oldThis
logger.Log.Debug(spew.Sdump(newConfig))
logger.D(spew.Sdump(newConfig))
md = headerRegex.ReplaceAll(md, []byte(""))
} else {
@ -191,27 +187,24 @@ func (node *TreeNode) ProcessContent() {
}
if ignoreRegex != nil && *ignoreRegex != "" {
regex, err := regexp.Compile(*ignoreRegex)
if err != nil {
logger.Log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
}
logger.Eexit(err, "could not compile filename.ignore regexp '%s' for file '%s'", *ignoreRegex, inFile)
ignoreFile = regex.MatchString(file)
}
}
if ignoreFile {
logger.Log.Infof("ignoring file '%s', because of filename.ignore", inFile)
logger.I("ignoring file '%s', because of filename.ignore", inFile)
} else {
var input []byte
if file != "" {
logger.Log.Debugf("reading file: %s", inFile)
logger.D("reading file: %s", inFile)
var err error
input, err = ioutil.ReadFile(inFile)
if err != nil {
logger.Log.Panicf("could not read '%s':%s", inFile, err)
}
logger.Log.Infof("processing input file '%s'", inFile)
logger.Eexit(err, "could not read '%s'", inFile)
logger.I("processing input file '%s'", inFile)
} else {
// use input string if available and input filename == ""
var inputString *string
@ -219,7 +212,7 @@ func (node *TreeNode) ProcessContent() {
inputString = i.InputString
}
if inputString != nil {
logger.Log.Debugf("using input string instead of file")
logger.D("using input string instead of file")
input = []byte(*inputString)
}
}
@ -251,9 +244,8 @@ func (node *TreeNode) ProcessContent() {
} else {
if stripRegex != nil && *stripRegex != "" {
regex, err := regexp.Compile(*stripRegex)
if err != nil {
logger.Log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
}
logger.Eexit(err, "could not compile filename.strip regexp '%s' for file '%s'", *stripRegex, inFile)
outputFilename = regex.ReplaceAllString(outputFilename, "$1")
}
if outputExt != nil && *outputExt != "" {
@ -262,21 +254,17 @@ func (node *TreeNode) ProcessContent() {
}
outFile := node.OutputPath + "/" + outputFilename
logger.Log.Debugf("using '%s' as output file", outFile)
logger.Log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
logger.D("using '%s' as output file", outFile)
logger.D("rendering template '%s' for '%s'", *newConfig.Template, outFile)
templateFilename := *newConfig.Template
result, err := renderTemplate(*newConfig.Template, node, newConfig, ctx)
if err != nil {
logger.Log.Panicf("could not execute template '%s' for input file '%s': %s", templateFilename, inFile, err)
}
logger.Eexit(err, "could not execute template '%s' for input file '%s': %s", templateFilename, inFile)
result = node.fixAssetsPath(result)
logger.Log.Noticef("writing to output file: %s", outFile)
logger.N("writing to output file: %s", outFile)
err = ioutil.WriteFile(outFile, []byte(result), 0644)
if err != nil {
logger.Log.Panicf("could not write to output file '%s': %s", outFile, err)
}
logger.Eexit(err, "could not write to output file '%s'", outFile)
handleCompression(outFile, []byte(result))
@ -290,11 +278,9 @@ func (node *TreeNode) ProcessContent() {
case "copy":
from := node.InputPath + "/" + file
to := node.OutputPath + "/" + file
logger.Log.Noticef("copying file from '%s' to '%s'", from, to)
logger.N("copying file from '%s' to '%s'", from, to)
err := cpy.Copy(from, to)
if err != nil {
logger.Log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
}
logger.Eexit(err, "could not copy file from '%s' to '%s': %s", from, to)
handleCompression(to, nil)
}

View File

@ -43,36 +43,33 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
inPath += "/" + subDir
}
logger.Log.Infof("reading input directory: %s", inPath)
logger.I("reading input directory: %s", inPath)
node.InputPath = inPath
// read config
newConfig := new(PathConfig)
logger.Log.Debug("looking for config.yml ...")
logger.D("looking for config.yml ...")
configFile := inPath + "/config.yml"
if _, err := os.Stat(configFile); os.IsNotExist(err) {
logger.Log.Debug("no config.yml found in this directory, using upper configs")
logger.D("no config.yml found in this directory, using upper configs")
helper.Merge(newConfig, conf)
// remove this
newConfig.This = ThisPathConfig{}
} else {
logger.Log.Debug("reading config...")
logger.D("reading config...")
data, err := ioutil.ReadFile(configFile)
if err != nil {
logger.Log.Panicf("could not read file '%s': %s", configFile, err)
}
err = yaml.Unmarshal(data, newConfig)
if err != nil {
logger.Log.Panicf("could not parse YAML file '%s': %s", configFile, err)
}
logger.Eexit(err, "could not read file '%s'", configFile)
logger.Log.Debug("merging config with upper config")
err = yaml.Unmarshal(data, newConfig)
logger.Eexit(err, "could not parse YAML file '%s'", configFile)
logger.D("merging config with upper config")
oldThis := newConfig.This
helper.Merge(newConfig, conf)
newConfig.This = oldThis
logger.Log.Debug(spew.Sdump(newConfig))
logger.D(spew.Sdump(newConfig))
}
node.Config = newConfig
@ -85,7 +82,7 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
}
if regexStr != nil && *regexStr != "" {
if regex, err := regexp.Compile(*regexStr); err != nil {
logger.Log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+subDir, err)
logger.Eexit(err, "error compiling path.strip regex '%s' from '%s'", *regexStr, inBase+"/"+subDir)
} else {
stripedDir = regex.ReplaceAllString(stripedDir, "$1")
}
@ -100,7 +97,7 @@ func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfi
outPath := outBase + "/" + stripedDir
outPath = path.Clean(outPath)
logger.Log.Infof("calculated output directory: %s", outPath)
logger.I("calculated output directory: %s", outPath)
node.OutputPath = outPath
// handle collections
@ -133,9 +130,8 @@ func (node *TreeNode) addSubNode(tplFilename, subDir string, navname string, ctx
mergedConfig := new(PathConfig)
err := helper.Merge(mergedConfig, node.Config)
if err != nil {
logger.Log.Panicf("merge of path config failed: %s", err)
}
logger.Eexit(err, "merge of path config failed")
// dont merge Data[DataKey]
if dataMapKey != "" {
mergedConfig.Data[dataMapKey] = nil
@ -143,9 +139,7 @@ func (node *TreeNode) addSubNode(tplFilename, subDir string, navname string, ctx
mergedConfig.Data = make(helper.MapString)
}
err = helper.Merge(mergedConfig, newPathConfig)
if err != nil {
logger.Log.Panicf("merge of path config failed: %s", err)
}
logger.Eexit(err, "merge of path config failed")
newNode.fillConfig(
node.InputPath,

View File

@ -12,13 +12,11 @@ func htaccessRedirect(outDir, goTo string) {
switch Config.Webserver.Type {
case "apache":
htaccessFile := outDir + "/.htaccess"
logger.Log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goTo)
logger.N("writing '%s' with redirect to: %s", htaccessFile, goTo)
err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
RewriteRule ^$ %{REQUEST_URI}`+goTo+`/ [R,L]
`), 0644)
if err != nil {
logger.Log.Panicf("could not write '%s': %s", htaccessFile, err)
}
logger.Eexit(err, "could not write '%s'", htaccessFile)
}
}
@ -90,11 +88,9 @@ RemoveLanguage .br
if configStr != "" {
htaccessFile := Config.Directories.Output + "/.htaccess"
logger.Log.Noticef("writing webserver config to: %s", htaccessFile)
logger.N("writing webserver config to: %s", htaccessFile)
err := ioutil.WriteFile(htaccessFile, []byte(configStr), 0644)
if err != nil {
logger.Log.Panicf("could not write '%s': %s", htaccessFile, err)
}
logger.Eexit(err, "could not write '%s'", htaccessFile)
}
}
}

View File

@ -42,11 +42,10 @@ func buildNavigationRecursive(tree *TreeNode, curNavMap *map[string]*NavElement,
}
if ignNav != nil && *ignNav != "" {
regex, err := regexp.Compile(*ignNav)
if err != nil {
logger.Log.Panicf("could not compile IngoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
}
logger.Eexit(err, "could not compile IngoreForNav regexp '%s' in '%s'", *ignNav, el.InputPath)
if regex.MatchString(path.Base(el.InputPath)) {
logger.Log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
logger.D("ignoring input directory '%s' in navigation", el.InputPath)
continue
}
}

View File

@ -27,7 +27,7 @@ var wrJSONCache = make(map[string]*wrJSONEntry)
// Get fetches a URL and returns the response
func Get(url string, opts interface{}) (resp *http.Response, err error) {
logger.Log.Noticef("requesting url via GET %s", url)
logger.N("requesting url via GET %s", url)
progress.IncrTotal("web request")
progress.DescribeCurrent("web request", url)
@ -41,20 +41,16 @@ func GetJSON(url string) interface{} {
cached := wrJSONCache[url]
if cached == nil {
resp, err := Get(url, nil)
if err != nil {
logger.Log.Panicf("could not get url '%s': %s", url, err)
}
logger.Eexit(err, "could not get url '%s'", url)
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
logger.Log.Panicf("could not read body from url '%s': %s", url, err)
}
logger.Eexit(err, "could not read body from url '%s'", url)
logger.Log.Debugf("output from url '%s':\n%s", url, string(body))
logger.D("output from url '%s':\n%s", url, string(body))
if resp.StatusCode >= 400 {
logger.Log.Panicf("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
logger.Exit("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
}
contentType := resp.Header.Get("Content-Type")
@ -62,7 +58,7 @@ func GetJSON(url string) interface{} {
if strings.Contains(contentType, "json") {
} else {
logger.Log.Panicf("is not json '%s' from url '%s'", contentType, url)
logger.Exit("is not json '%s' from url '%s'", contentType, url)
}
cached = new(wrJSONEntry)
@ -77,7 +73,7 @@ func GetJSON(url string) interface{} {
if err == nil {
cached.data = jsonArrayMap
} else {
logger.P("could not read json from '%s': invalid type", url)
logger.Exit("could not read json from '%s': invalid type", url)
}
}