2019-02-10 13:49:27 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2019-02-24 19:12:31 +01:00
|
|
|
"bytes"
|
2019-02-11 15:00:27 +01:00
|
|
|
"flag"
|
2019-02-18 13:05:30 +01:00
|
|
|
"fmt"
|
2019-02-10 13:49:27 +01:00
|
|
|
"io/ioutil"
|
2019-02-11 15:00:27 +01:00
|
|
|
"os"
|
2019-02-12 11:35:25 +01:00
|
|
|
"path"
|
2019-02-19 18:18:40 +01:00
|
|
|
"reflect"
|
2019-02-11 15:25:48 +01:00
|
|
|
"regexp"
|
2019-02-11 15:00:27 +01:00
|
|
|
"strings"
|
2019-02-10 13:49:27 +01:00
|
|
|
|
2019-02-12 15:28:22 +01:00
|
|
|
"github.com/imdario/mergo"
|
|
|
|
|
2019-02-18 18:02:35 +01:00
|
|
|
"github.com/Depado/bfchroma"
|
2019-02-11 15:00:27 +01:00
|
|
|
"github.com/davecgh/go-spew/spew"
|
2019-02-14 13:46:33 +01:00
|
|
|
"github.com/flosch/pongo2"
|
2019-02-14 14:42:46 +01:00
|
|
|
"github.com/gosimple/slug"
|
2019-02-11 15:00:27 +01:00
|
|
|
"github.com/op/go-logging"
|
2019-02-12 19:09:25 +01:00
|
|
|
cpy "github.com/otiai10/copy"
|
2019-02-12 11:35:25 +01:00
|
|
|
"gopkg.in/russross/blackfriday.v2"
|
2019-02-11 15:00:27 +01:00
|
|
|
"gopkg.in/yaml.v2"
|
2019-02-10 13:49:27 +01:00
|
|
|
)
|
|
|
|
|
2019-02-18 13:05:30 +01:00
|
|
|
// Build information. All three default to "UNKNOWN"; presumably they
// are overridden at build time via -ldflags "-X main.Version=..." —
// TODO confirm against the build scripts.
var (
	// Version is the app's version string
	Version = "UNKNOWN"
	// GitHash is the current git hash for this version
	GitHash = "UNKNOWN"
	// BuildTime is the time of build of this app
	BuildTime = "UNKNOWN"
)
|
|
|
|
|
2019-02-11 15:00:27 +01:00
|
|
|
// log is the package-wide logger instance.
var log = logging.MustGetLogger("myLogger")

// inDir and outDir hold the values of the -in and -out command line
// flags; they are assigned in main before any processing starts.
var inDir *string
var outDir *string

// templateCache caches parsed pongo2 templates by template file path
// so each template is parsed only once (see processContent).
var templateCache = make(map[string]*pongo2.Template)
|
2019-02-12 12:52:46 +01:00
|
|
|
|
2019-02-11 15:00:27 +01:00
|
|
|
// GlobalConfig is config which is used only once in root dir
type GlobalConfig struct {
	// Webserver selects webserver-specific output; when Type is
	// "apache", processContent writes .htaccess redirect files.
	Webserver struct {
		Type string `yaml:"Type"`
	} `yaml:"Webserver"`

	// Assets describes how static assets are transferred from the
	// input tree (FromPath) to the output tree (ToPath) and how asset
	// references in rendered pages are rewritten.
	Assets struct {
		FromPath string `yaml:"FromPath"`
		ToPath   string `yaml:"ToPath"`
		// Action currently supports "copy" (see processAssets).
		Action string `yaml:"Action"`
		// FixTemplate rewrites asset references in rendered output:
		// Find is a regexp, Replace the replacement path fragment.
		FixTemplate struct {
			Find    string `yaml:"Find"`
			Replace string `yaml:"Replace"`
		} `yaml:"FixTemplate"`
	} `yaml:"Assets"`

	// OtherFiles controls handling of non-markdown content files;
	// "copy" copies them into the output directory (see processContent).
	OtherFiles struct {
		Action string `yaml:"Action"`
	} `yaml:"OtherFiles"`
}

// globalConfig holds the parsed root config.yml (loaded in main).
var globalConfig = new(GlobalConfig)
|
|
|
|
|
2019-02-14 13:49:19 +01:00
|
|
|
// ThisPathConfig is struct for This in paths yaml
type ThisPathConfig struct {
	// Navname is the display name used in the navigation.
	Navname *string `yaml:"Navname"`
	// GoTo is a redirect target; absolute when starting with "/",
	// otherwise relative to the current path.
	GoTo *string `yaml:"GoTo"`
	// Data is free-form data made available to templates.
	Data interface{} `yaml:"Data"`
}
|
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// indexStruct maps a directory's index input file (e.g. README.md)
// to its output file name (e.g. index.html).
type indexStruct struct {
	InputFile  *string `yaml:"InputFile"`
	OutputFile *string `yaml:"OutputFile"`
}
|
2019-02-12 11:35:25 +01:00
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// metaStruct holds page meta information; it is exposed to templates
// as "Meta" (see processContent).
type metaStruct struct {
	Title       *string `yaml:"Title"`
	Description *string `yaml:"Description"`
	Keywords    *string `yaml:"Keywords"`
}
|
2019-02-11 15:00:27 +01:00
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// pathStruct configures directory name handling: Strip is a regexp
// whose first capture group replaces the directory name when building
// the output path; IgnoreForNav is a regexp matching directories to
// omit from the navigation.
type pathStruct struct {
	Strip        *string `yaml:"Strip"`
	IgnoreForNav *string `yaml:"IgnoreForNav"`
}
|
2019-02-11 15:00:27 +01:00
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// filenameStruct configures input filename handling: Strip and Ignore
// are regexps applied to filenames, OutputExtension is appended to
// generated output files (e.g. "html").
type filenameStruct struct {
	Strip           *string `yaml:"Strip"`
	Ignore          *string `yaml:"Ignore"`
	OutputExtension *string `yaml:"OutputExtension"`
}
|
2019-02-12 10:21:51 +01:00
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// markdownStruct configures markdown rendering; when ChromaRenderer
// is true, code blocks are highlighted with ChromaStyle (which
// defaults to "monokai" in processContent when unset).
type markdownStruct struct {
	ChromaRenderer *bool   `yaml:"ChromaRenderer"`
	ChromaStyle    *string `yaml:"ChromaStyle"`
}
|
2019-02-12 13:05:26 +01:00
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
// PathConfig of subdir
//
// Pointer fields stay nil when unset so that merging with the parent
// directory's config (see merge) only fills missing values.
type PathConfig struct {
	This     ThisPathConfig  `yaml:"This"`
	Template *string         `yaml:"Template"`
	Index    *indexStruct    `yaml:"Index"`
	Meta     *metaStruct     `yaml:"Meta"`
	Path     *pathStruct     `yaml:"Path"`
	Filename *filenameStruct `yaml:"Filename"`
	Markdown *markdownStruct `yaml:"Markdown"`

	// Data is free-form data made available to templates.
	Data interface{} `yaml:"Data"`
}
|
|
|
|
|
|
|
|
// PathConfigTree is complete config tree of content dir
type PathConfigTree struct {
	InputPath  string // input directory this node was read from
	OutputPath string // calculated output directory for this node

	InputFiles []string // markdown (.md) files found in this directory
	OtherFiles []string // all remaining files (handled per OtherFiles.Action)

	Config *PathConfig       // effective (merged) config for this directory
	Sub    []*PathConfigTree // child directories
}

// contentConfig is the root of the content tree, filled by readContentDir.
var contentConfig = new(PathConfigTree)
|
|
|
|
|
2019-02-19 18:18:40 +01:00
|
|
|
type ptrTransformer struct{}
|
|
|
|
|
|
|
|
func (t ptrTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
|
|
|
|
if typ.Kind() == reflect.Ptr {
|
|
|
|
return func(dst, src reflect.Value) error {
|
|
|
|
if dst.CanSet() {
|
|
|
|
if dst.IsNil() {
|
|
|
|
dst.Set(src)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// merge merges src into dst using mergo, with ptrTransformer applied
// so that pointer fields already set in dst are preserved and only
// nil pointers are filled from src.
func merge(dst, src interface{}) error {
	return mergo.Merge(dst, src, mergo.WithTransformers(ptrTransformer{}))
}
|
|
|
|
|
2019-02-15 12:11:27 +01:00
|
|
|
// backToRoot returns the relative prefix ("../", "../../", ...) that
// leads from curNavPath back to the site root. An empty path is
// already at the root and yields "".
func backToRoot(curNavPath string) string {
	if curNavPath == "" {
		return ""
	}
	// One "../" per path segment: number of "/" separators plus one.
	// strings.Repeat replaces the original += loop (no repeated
	// string reallocation, same result).
	return strings.Repeat("../", strings.Count(curNavPath, "/")+1)
}
|
|
|
|
|
2019-02-12 10:21:51 +01:00
|
|
|
// readContentDir recursively reads the content directory dir below
// inBase. For each directory it merges an optional config.yml with
// the parent config conf, calculates the output path below outBase,
// and fills tree with the markdown files, other files and
// subdirectories found.
func readContentDir(inBase string, outBase string, dir string, conf *PathConfig, tree *PathConfigTree) {
	inPath := inBase
	if dir != "" {
		inPath += "/" + dir
	}

	log.Infof("reading input directory: %s", inPath)

	files, err := ioutil.ReadDir(inPath)
	if err != nil {
		log.Panic(err)
	}

	tree.InputPath = inPath

	// read config
	newConfig := new(PathConfig)
	log.Debug("looking for config.yml ...")
	configFile := inPath + "/config.yml"
	if _, err = os.Stat(configFile); os.IsNotExist(err) {
		log.Debug("no config.yml found in this directory, using upper configs")
		merge(newConfig, conf)
		// "This" is per-directory only and must not be inherited
		// from the parent, so reset it after the merge.
		newConfig.This = ThisPathConfig{}
	} else {
		log.Debug("reading config...")
		data, err := ioutil.ReadFile(configFile)
		if err != nil {
			log.Panicf("could not read file '%s': %s", configFile, err)
		}
		err = yaml.Unmarshal(data, newConfig)
		if err != nil {
			log.Panicf("could not parse YAML file '%s': %s", configFile, err)
		}

		log.Debug("merging config with upper config")
		// Preserve this directory's own "This" section across the
		// merge with the parent config (see note above).
		oldThis := newConfig.This
		merge(newConfig, conf)
		newConfig.This = oldThis

		log.Debug(spew.Sdump(newConfig))
	}

	tree.Config = newConfig

	// Calculate the output directory name: optionally strip a prefix
	// via the Path.Strip regexp (first capture group survives).
	stripedDir := dir
	var regexStr *string
	if newConfig.Path != nil {
		regexStr = newConfig.Path.Strip
	}
	if regexStr != nil && *regexStr != "" {
		if regex, err := regexp.Compile(*regexStr); err != nil {
			log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+dir, err)
		} else {
			stripedDir = regex.ReplaceAllString(stripedDir, "$1")
		}
	}

	// Default the navigation name to the stripped directory name with
	// underscores turned into spaces.
	if tree.Config.This.Navname == nil {
		navname := strings.Replace(stripedDir, "_", " ", -1)
		tree.Config.This.Navname = &navname
	}

	// Slugify the directory name for a URL-safe output path.
	stripedDir = slug.Make(stripedDir)
	outPath := outBase + "/" + stripedDir
	outPath = path.Clean(outPath)

	log.Infof("calculated output directory: %s", outPath)
	tree.OutputPath = outPath

	// First pass: files only. Markdown files become input files,
	// everything else (except config.yml) is collected separately.
	for _, f := range files {
		p := inPath + "/" + f.Name()
		if !f.IsDir() && f.Name() != "config.yml" {
			switch path.Ext(f.Name()) {
			case ".md":
				log.Debugf(".MD %s", p)
				if tree.InputFiles == nil {
					tree.InputFiles = make([]string, 0)
				}
				tree.InputFiles = append(tree.InputFiles, f.Name())
				break
			default:
				log.Debugf("FIL %s", p)
				if tree.OtherFiles == nil {
					tree.OtherFiles = make([]string, 0)
				}
				tree.OtherFiles = append(tree.OtherFiles, f.Name())
			}
		}
	}

	// Second pass: directories only — they need this directory's
	// merged config (newConfig) to inherit from.
	for _, f := range files {
		p := inPath + "/" + f.Name()
		if f.IsDir() {
			log.Debugf("DIR %s", p)
			newTree := new(PathConfigTree)
			if tree.Sub == nil {
				tree.Sub = make([]*PathConfigTree, 0)
			}
			tree.Sub = append(tree.Sub, newTree)
			readContentDir(inPath, outPath, f.Name(), newConfig, newTree)
		}
	}
}
|
|
|
|
|
2019-02-12 15:28:22 +01:00
|
|
|
// navElement is a single entry of the rendered navigation tree.
type navElement struct {
	Navname string // display name
	GoTo    string // link target, made relative to the current page
	Active  bool   // true if this entry lies on the active page's path

	// Data is the free-form Data from the directory's config.
	Data interface{}

	// This mirrors the directory's ThisPathConfig.
	This ThisPathConfig

	// SubMap and SubSlice expose the child entries addressable by
	// name or in input order, respectively.
	SubMap   *map[string]*navElement
	SubSlice *[]*navElement
}
|
|
|
|
|
2019-02-13 15:44:16 +01:00
|
|
|
// buildNavigation recursively builds navigation structures for the
// subdirectories of conf. curNavMap and curNavSlice receive this
// level's entries (keyed by nav name / in input order); navActive
// collects the chain of entries leading to activeNav, the output path
// of the page currently being rendered relative to the output root.
func buildNavigation(conf *PathConfigTree, curNavMap *map[string]*navElement, curNavSlice *[]*navElement, navActive *[]*navElement, activeNav string) {
	for _, el := range conf.Sub {
		// Skip directories matching the Path.IgnoreForNav regexp.
		var ignNav *string
		if p := el.Config.Path; p != nil {
			ignNav = p.IgnoreForNav
		}
		if ignNav != nil && *ignNav != "" {
			regex, err := regexp.Compile(*ignNav)
			if err != nil {
				log.Panicf("could not compile IngoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
			}
			if regex.MatchString(path.Base(el.InputPath)) {
				log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
				continue
			}
		}

		// Entry path relative to the output root.
		elPath := strings.TrimPrefix(el.OutputPath, *outDir+"/")

		subMap := make(map[string]*navElement)
		subSlice := make([]*navElement, 0)
		navEl := navElement{
			Active:   strings.HasPrefix(activeNav, elPath),
			Data:     el.Config.Data,
			SubMap:   &subMap,
			SubSlice: &subSlice,
		}

		navEl.This = el.Config.This

		if navEl.Active {
			// Register this entry in the active-chain navigation,
			// one entry per path level.
			currentLevel := strings.Count(activeNav, "/")
			if len(*navActive) <= currentLevel {
				// not registered yet for this level
				*navActive = append(*navActive, &navEl)
			}
		}

		n := el.Config.This.Navname
		if n != nil {
			navEl.Navname = *n
		}
		g := el.Config.This.GoTo
		if g != nil {
			if strings.HasPrefix(*g, "/") {
				// absolute target
				navEl.GoTo = *g
			} else {
				// relative target
				navEl.GoTo = elPath + "/" + *g
			}
		} else {
			navEl.GoTo = elPath + "/"
		}

		if activeNav != "" && activeNav != "/" {
			// Make the link relative to the page being rendered.
			bToRoot := backToRoot(activeNav)
			navEl.GoTo = bToRoot + navEl.GoTo
			navEl.GoTo = path.Clean(navEl.GoTo)
		}

		(*curNavMap)[navEl.Navname] = &navEl
		if curNavSlice != nil {
			*curNavSlice = append(*curNavSlice, &navEl)
		}

		// Recurse into this entry's children.
		buildNavigation(el, &subMap, &subSlice, navActive, activeNav)
	}
}
|
|
|
|
|
2019-02-11 15:25:48 +01:00
|
|
|
// processContent renders one node of the content tree: it creates the
// output directory, writes a webserver redirect when This.GoTo is
// set, converts each markdown input file to HTML through its pongo2
// template, handles the non-markdown files, and finally recurses into
// the subdirectories.
func processContent(conf *PathConfigTree) {
	log.Debugf("trying to create output directory: %s", conf.OutputPath)

	if dirH, err := os.Stat(conf.OutputPath); os.IsNotExist(err) {
		err := os.MkdirAll(conf.OutputPath, 0755)
		if err != nil {
			log.Panicf("could not create output directory '%s': %s", conf.OutputPath, err)
		}
		log.Noticef("created output directory: %s", conf.OutputPath)
	} else if dirH != nil {
		if dirH.IsDir() {
			log.Noticef("output directory '%s' already exists", conf.OutputPath)
		} else {
			log.Panicf("output directory '%s' is no directory", conf.OutputPath)
		}
	} else {
		log.Panicf("unknown error for output directory '%s': %s", conf.OutputPath, err)
	}

	// Navigation path of this node relative to the output root
	// ("" for the root itself).
	curNavPath := strings.TrimPrefix(conf.OutputPath, *outDir)
	curNavPath = strings.TrimPrefix(curNavPath, "/")
	curNavPath = path.Clean(curNavPath)
	if curNavPath == "." {
		curNavPath = ""
	}

	// Write a webserver redirect if this directory declares GoTo.
	goTo := conf.Config.This.GoTo
	if goTo != nil && *goTo != "" {
		goToFixed := *goTo
		if strings.HasPrefix(goToFixed, "/") {
			// absolute targets are rebased onto the site root
			goToFixed = backToRoot(curNavPath) + goToFixed
		}
		goToFixed = path.Clean(goToFixed)

		switch globalConfig.Webserver.Type {
		case "apache":
			htaccessFile := conf.OutputPath + "/.htaccess"
			log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goToFixed)
			err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
RewriteRule ^$ %{REQUEST_URI}`+goToFixed+`/ [R,L]
`), 0644)
			if err != nil {
				log.Panicf("could not write '%s': %s", htaccessFile, err)
			}
			break
		}
	}

	// Render each markdown input file.
	for _, file := range conf.InputFiles {
		inFile := conf.InputPath + "/" + file
		log.Debugf("reading file: %s", inFile)

		input, err := ioutil.ReadFile(inFile)
		if err != nil {
			log.Panicf("could not read '%s':%s", inFile, err)
		}
		log.Infof("processing input file '%s'", inFile)

		newConfig := new(PathConfig)

		// Optional YAML front matter delimited by "---" lines.
		regex := regexp.MustCompile("(?s)^---(.*?)\\r?\\n\\r?---\\r?\\n\\r?")
		yamlData := regex.Find(input)
		if string(yamlData) != "" {
			log.Debugf("found yaml header in '%s', merging config", inFile)
			err = yaml.Unmarshal(yamlData, newConfig)
			if err != nil {
				log.Panicf("could not parse YAML header from '%s': %s", inFile, err)
			}

			log.Debug("merging config with upper config")
			// Keep the file's own "This" section across the merge.
			oldThis := newConfig.This
			merge(newConfig, conf.Config)
			newConfig.This = oldThis

			log.Debug(spew.Sdump(newConfig))

			// Strip the front matter before markdown rendering.
			input = regex.ReplaceAll(input, []byte(""))
		} else {
			merge(newConfig, conf.Config)
		}

		// Should this file be ignored (Filename.Ignore regexp)?
		ignoreFile := false
		var ignoreRegex *string
		var stripRegex *string
		var outputExt *string
		if f := newConfig.Filename; f != nil {
			ignoreRegex = f.Ignore
			stripRegex = f.Strip
			outputExt = f.OutputExtension
		}
		if ignoreRegex != nil && *ignoreRegex != "" {
			regex, err := regexp.Compile(*ignoreRegex)
			if err != nil {
				log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
			}
			ignoreFile = regex.MatchString(file)
		}

		if ignoreFile {
			log.Infof("ignoring file '%s', because of filename.ignore", inFile)
		} else {

			// Build the output filename: the configured index input
			// file maps to Index.OutputFile, everything else gets
			// Filename.Strip applied plus the output extension.
			outputFilename := file

			var indexInputFile *string
			var indexOutputFile *string
			if i := newConfig.Index; i != nil {
				indexInputFile = i.InputFile
				indexOutputFile = i.OutputFile
			}

			if indexInputFile != nil && *indexInputFile == file && indexOutputFile != nil && *indexOutputFile != "" {
				outputFilename = *indexOutputFile
			} else {
				if stripRegex != nil && *stripRegex != "" {
					regex, err := regexp.Compile(*stripRegex)
					if err != nil {
						log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
					}
					outputFilename = regex.ReplaceAllString(outputFilename, "$1")
				}
				if outputExt != nil && *outputExt != "" {
					outputFilename += "." + *outputExt
				}
			}

			outFile := conf.OutputPath + "/" + outputFilename
			log.Debugf("using '%s' as output file", outFile)

			// Markdown renderer options: optional chroma syntax
			// highlighting with a configurable style.
			var options []blackfriday.Option

			var chromaRenderer *bool
			var chromaStyle *string
			if m := newConfig.Markdown; m != nil {
				chromaRenderer = m.ChromaRenderer
				chromaStyle = m.ChromaStyle
			}
			if chromaStyle == nil {
				style := "monokai"
				chromaStyle = &style
			}
			if chromaRenderer != nil && *chromaRenderer {
				options = []blackfriday.Option{
					blackfriday.WithRenderer(
						bfchroma.NewRenderer(
							bfchroma.Style(*chromaStyle),
						),
					),
				}
			}

			// fix \r from markdown for blackfriday
			input = bytes.Replace(input, []byte("\r"), []byte(""), -1)
			html := blackfriday.Run(input, options...)

			// use --- for splitting document in markdown parts
			regex := regexp.MustCompile("\\r?\\n\\r?---\\r?\\n\\r?")
			inputParts := regex.Split(string(input), -1)
			htmlParts := make([]*pongo2.Value, 0)
			for _, iPart := range inputParts {
				htmlParts = append(htmlParts, pongo2.AsSafeValue(string(blackfriday.Run([]byte(iPart), options...))))
			}

			// Load the pongo2 template (cached per template file).
			log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
			templateFile := *inDir + "/templates/" + *newConfig.Template
			template := templateCache[templateFile]
			if template == nil {
				var err error
				if template, err = pongo2.FromFile(templateFile); err != nil {
					log.Panicf("could not parse template '%s': %s", templateFile, err)
				} else {
					templateCache[templateFile] = template
				}
			}

			// Build the navigation relative to this page.
			navMap := make(map[string]*navElement)
			navSlice := make([]*navElement, 0)
			navActive := make([]*navElement, 0)
			buildNavigation(contentConfig, &navMap, &navSlice, &navActive, curNavPath)

			// Template context: config data plus rendered body parts.
			ctx := make(pongo2.Context)
			ctx["This"] = newConfig.This
			ctx["Meta"] = newConfig.Meta
			ctx["Data"] = newConfig.Data
			ctx["NavMap"] = navMap
			ctx["NavSlice"] = navSlice
			ctx["NavActive"] = navActive
			ctx["Body"] = pongo2.AsSafeValue(string(html))
			ctx["BodyParts"] = htmlParts

			result, err := template.Execute(ctx)
			if err != nil {
				log.Panicf("could not execute template '%s' for input file '%s': %s", templateFile, inFile, err)
			}

			// Rewrite asset references so they point at the assets
			// directory relative to this page.
			if find := globalConfig.Assets.FixTemplate.Find; find != "" {
				log.Debugf("fixing assets paths in '%s' for '%s'", templateFile, inFile)
				bToRoot := backToRoot(curNavPath)
				regex, err := regexp.Compile(find)
				if err != nil {
					log.Panicf("could not compile regexp '%s' for assets path: %s", find, err)
				}
				repl := globalConfig.Assets.FixTemplate.Replace
				repl = bToRoot + globalConfig.Assets.ToPath + "/" + repl
				repl = path.Clean(repl) + "/"
				log.Debugf("new assets paths: %s", repl)
				result = regex.ReplaceAllString(result, repl)
			}

			log.Noticef("writing to output file: %s", outFile)
			err = ioutil.WriteFile(outFile, []byte(result), 0644)
			if err != nil {
				log.Panicf("could not write to output file '%s': %s", outFile, err)
			}

			//fmt.Println(string(html))
		}
	}

	// Handle non-markdown files according to OtherFiles.Action.
	for _, file := range conf.OtherFiles {
		switch globalConfig.OtherFiles.Action {
		case "copy":
			from := conf.InputPath + "/" + file
			to := conf.OutputPath + "/" + file
			log.Noticef("copying file from '%s' to '%s'", from, to)
			err := cpy.Copy(from, to)
			if err != nil {
				log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
			}
		}
	}

	// Recurse into subdirectories.
	for _, el := range conf.Sub {
		processContent(el)
	}
}
|
|
|
|
|
2019-02-12 19:09:25 +01:00
|
|
|
// processAssets transfers the configured assets directory into the
// output tree according to globalConfig.Assets. Only the "copy"
// action is implemented; relative From/To paths are resolved against
// the input and output directories respectively.
func processAssets() {
	switch globalConfig.Assets.Action {
	case "copy":
		from := globalConfig.Assets.FromPath
		to := globalConfig.Assets.ToPath
		if !strings.HasPrefix(from, "/") {
			from = *inDir + "/" + from
		}
		if !strings.HasPrefix(to, "/") {
			to = *outDir + "/" + to
		}
		log.Noticef("copying assets from '%s' to '%s'", from, to)
		err := cpy.Copy(from, to)
		if err != nil {
			log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
		}
	}
}
|
|
|
|
|
2019-02-10 13:49:27 +01:00
|
|
|
// main parses the command line flags, configures logging, loads the
// global config.yml from the input directory, reads the content tree
// with default path settings, and then renders content and assets
// into the output directory.
func main() {
	// Keep spew debug dumps compact and deterministic.
	spew.Config.DisablePointerAddresses = true
	spew.Config.DisableCapacities = true
	spew.Config.DisableMethods = true
	spew.Config.DisablePointerMethods = true

	inDir = flag.String("in", "./", "input directory")
	outDir = flag.String("out", "html", "output directory")
	createOutDir := flag.Bool("create", false, "create output directory if not existing")
	//clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
	logLevel := flag.String("logLevel", "info", "log level: debug, info, warning, error")
	version := flag.Bool("version", false, "print version of this executable")

	flag.Parse()
	// -version prints build info and exits.
	if version != nil && *version {
		fmt.Printf(`%11s: %s
%11s: %s
%11s: %s
`, "version", Version, "git hash", GitHash, "build time", BuildTime)
		os.Exit(0)
	}

	// Set up a leveled, formatted stderr log backend.
	logBackend := logging.NewLogBackend(os.Stderr, "", 0)
	logBackendFormatter := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(
		`%{color}%{time:15:04:05.000} %{shortfunc} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,
	))
	logBackendLeveled := logging.AddModuleLevel(logBackendFormatter)
	logBackendLevel := logging.INFO
	if logLevel != nil {
		switch *logLevel {
		case "debug":
			logBackendLevel = logging.DEBUG
			break

		case "info":
			logBackendLevel = logging.INFO
			break

		case "notice":
			logBackendLevel = logging.NOTICE
			break

		case "warning":
			logBackendLevel = logging.WARNING
			break

		case "error":
			logBackendLevel = logging.ERROR
			break

		}
	}
	logBackendLeveled.SetLevel(logBackendLevel, "")
	logging.SetBackend(logBackendLeveled)

	if inDir == nil || *inDir == "" {
		log.Panic("input directory not specified")
	}
	log.Infof("input directory: %s", *inDir)

	if outDir == nil || *outDir == "" {
		log.Panic("output directory not specified")
	}
	log.Infof("output directory: %s", *outDir)

	// Optionally create the output directory (-create flag).
	if createOutDir != nil && *createOutDir {
		if _, err := os.Stat(*outDir); os.IsNotExist(err) {
			log.Debugf("output directory '%s' does not exist", *outDir)
			log.Debugf("trying to create output directory: %s", *outDir)
			err := os.MkdirAll(*outDir, 0755)
			if err != nil {
				log.Panic(err)
			}
			log.Noticef("created output directory: %s", *outDir)
		} else {
			log.Noticef("output directory '%s' already exists", *outDir)
		}
	}

	// The output directory must exist and be a directory at this point.
	if fD, err := os.Stat(*outDir); os.IsNotExist(err) {
		log.Panicf("output directory '%s' does not exist, try -create parameter or create manually", *outDir)
	} else {
		if fD == nil {
			log.Panicf("something went wrong, could not get file handle for output dir %s", *outDir)
		} else if !fD.IsDir() {
			log.Panicf("output directory '%s' is not a directory", *outDir)
		}
	}

	// Load the global config.yml from the input directory root.
	log.Debug("reading global config...")
	p := *inDir + "/config.yml"
	data, err := ioutil.ReadFile(p)
	if err != nil {
		log.Panicf("could not read file '%s': %s", p, err)
	}
	err = yaml.Unmarshal(data, globalConfig)
	if err != nil {
		log.Panicf("could not parse YAML file '%s': %s", p, err)
	}
	log.Debug(spew.Sdump(globalConfig))

	log.Debugf("reading input directory %s", *inDir)

	// Defaults applied to the content tree root; subdirectories
	// inherit and may override them via their own config.yml.
	defaultTemplate := "base.html"
	defaultInputFile := "README.md"
	defaultOutputFile := "index.html"
	defaultPathStrip := "^[0-9]*_(.*)"
	defaultPathIgnoreForNav := "^_"
	defaultFilenameStrip := "(.*).md$"
	defaultFilenameIgnore := "^_"
	defaultFilenameOutputExtension := "html"

	defaultPathConfig := new(PathConfig)
	defaultPathConfig.Template = &defaultTemplate
	defaultPathConfig.Index = &indexStruct{
		InputFile:  &defaultInputFile,
		OutputFile: &defaultOutputFile,
	}
	defaultPathConfig.Path = &pathStruct{
		Strip:        &defaultPathStrip,
		IgnoreForNav: &defaultPathIgnoreForNav,
	}
	defaultPathConfig.Filename = &filenameStruct{
		Strip:           &defaultFilenameStrip,
		Ignore:          &defaultFilenameIgnore,
		OutputExtension: &defaultFilenameOutputExtension,
	}

	readContentDir(*inDir+"/content", *outDir, "", defaultPathConfig, contentConfig)
	//spew.Dump(contentConfig)

	//spew.Dump(navMap)

	processContent(contentConfig)

	processAssets()
}
|