package main

import (
	"bytes"
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"regexp"
	"strings"

	"gitbase.de/apairon/mark2web/config"
	"gitbase.de/apairon/mark2web/helper"
	"github.com/Depado/bfchroma"
	"github.com/davecgh/go-spew/spew"
	"github.com/extemporalgenome/slug"
	"github.com/flosch/pongo2"
	cpy "github.com/otiai10/copy"
	"gopkg.in/russross/blackfriday.v2"
	"gopkg.in/yaml.v2"
)

var (
	// Version is the app's version string
	Version = "UNKNOWN"
	// GitHash is the current git hash for this version
	GitHash = "UNKNOWN"
	// BuildTime is the time this app was built
	BuildTime = "UNKNOWN"
)

var log = helper.Log

var inDir *string
var outDir *string

var contentConfig = new(config.PathConfigTree)
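
// readContentDir walks the content tree below inBase/dir. For every directory
// it reads an optional config.yml, merges it with the parent config (conf),
// derives the output directory name, collects markdown and other files into
// tree and then recurses into the subdirectories.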
func readContentDir(inBase string, outBase string, dir string, conf *config.PathConfig, tree *config.PathConfigTree) {
	inPath := inBase
	if dir != "" {
		inPath += "/" + dir
	}

	log.Infof("reading input directory: %s", inPath)

	files, err := ioutil.ReadDir(inPath)
	if err != nil {
		log.Panic(err)
	}

	tree.InputPath = inPath

	// read config
	newConfig := new(config.PathConfig)
	log.Debug("looking for config.yml ...")
	configFile := inPath + "/config.yml"
	if _, err = os.Stat(configFile); os.IsNotExist(err) {
		log.Debug("no config.yml found in this directory, using upper configs")
		config.Merge(newConfig, conf)
		// reset the directory-specific part, it must not be inherited
		newConfig.This = config.ThisPathConfig{}
	} else {
		log.Debug("reading config...")
		data, err := ioutil.ReadFile(configFile)
		if err != nil {
			log.Panicf("could not read file '%s': %s", configFile, err)
		}
		err = yaml.Unmarshal(data, newConfig)
		if err != nil {
			log.Panicf("could not parse YAML file '%s': %s", configFile, err)
		}

		log.Debug("merging config with upper config")
		oldThis := newConfig.This
		config.Merge(newConfig, conf)
		newConfig.This = oldThis

		log.Debug(spew.Sdump(newConfig))
	}

	tree.Config = newConfig

	// calculate the output directory name
	strippedDir := dir
	var regexStr *string
	if newConfig.Path != nil {
		regexStr = newConfig.Path.Strip
	}
	if regexStr != nil && *regexStr != "" {
		if regex, err := regexp.Compile(*regexStr); err != nil {
			log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+dir, err)
		} else {
			strippedDir = regex.ReplaceAllString(strippedDir, "$1")
		}
	}

	if tree.Config.This.Navname == nil {
		navname := strings.Replace(strippedDir, "_", " ", -1)
		tree.Config.This.Navname = &navname
	}

	strippedDir = slug.Slug(strippedDir)
	outPath := outBase + "/" + strippedDir
	outPath = path.Clean(outPath)

	log.Infof("calculated output directory: %s", outPath)
	tree.OutputPath = outPath

	// first only files
	for _, f := range files {
		p := inPath + "/" + f.Name()
		if !f.IsDir() && f.Name() != "config.yml" {
			switch path.Ext(f.Name()) {
			case ".md":
				log.Debugf(".MD %s", p)
				if tree.InputFiles == nil {
					tree.InputFiles = make([]string, 0)
				}
				tree.InputFiles = append(tree.InputFiles, f.Name())
			default:
				log.Debugf("FIL %s", p)
				if tree.OtherFiles == nil {
					tree.OtherFiles = make([]string, 0)
				}
				tree.OtherFiles = append(tree.OtherFiles, f.Name())
			}
		}
	}

	// then only directories, the config read above is needed for the recursion
	for _, f := range files {
		p := inPath + "/" + f.Name()
		if f.IsDir() {
			log.Debugf("DIR %s", p)
			newTree := new(config.PathConfigTree)
			if tree.Sub == nil {
				tree.Sub = make([]*config.PathConfigTree, 0)
			}
			tree.Sub = append(tree.Sub, newTree)
			readContentDir(inPath, outPath, f.Name(), newConfig, newTree)
		}
	}
}
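
// navElement describes one entry of the navigation tree handed to the
// templates: its name, link target, whether it lies on the active path, the
// directory's data and config, and its sub-entries as map and as ordered slice.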
type navElement struct {
	Navname string
	GoTo    string
	Active  bool

	Data interface{}

	This config.ThisPathConfig

	SubMap   *map[string]*navElement
	SubSlice *[]*navElement
}
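
// buildNavigation recursively converts the content tree below conf into
// navElement values. curNavMap and curNavSlice receive the entries of the
// current level, navActive collects one active entry per level leading to the
// page currently being rendered, and activeNav is that page's output path
// relative to the output root.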
func buildNavigation(conf *config.PathConfigTree, curNavMap *map[string]*navElement, curNavSlice *[]*navElement, navActive *[]*navElement, activeNav string) {
	for _, el := range conf.Sub {
		var ignNav *string
		if p := el.Config.Path; p != nil {
			ignNav = p.IgnoreForNav
		}
		if ignNav != nil && *ignNav != "" {
			regex, err := regexp.Compile(*ignNav)
			if err != nil {
				log.Panicf("could not compile IgnoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
			}
			if regex.MatchString(path.Base(el.InputPath)) {
				log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
				continue
			}
		}

		elPath := strings.TrimPrefix(el.OutputPath, *outDir+"/")

		subMap := make(map[string]*navElement)
		subSlice := make([]*navElement, 0)
		navEl := navElement{
			Active:   strings.HasPrefix(activeNav, elPath),
			Data:     el.Config.Data,
			SubMap:   &subMap,
			SubSlice: &subSlice,
		}

		navEl.This = el.Config.This

		if navEl.Active {
			// register in the navActive chain for this navigation level
			currentLevel := strings.Count(activeNav, "/")
			if len(*navActive) <= currentLevel {
				// no entry registered for this level yet
				*navActive = append(*navActive, &navEl)
			}
		}

		n := el.Config.This.Navname
		if n != nil {
			navEl.Navname = *n
		}
		g := el.Config.This.GoTo
		if g != nil {
			if strings.HasPrefix(*g, "/") {
				// absolute
				navEl.GoTo = *g
			} else {
				// relative
				navEl.GoTo = elPath + "/" + *g
			}
		} else {
			navEl.GoTo = elPath + "/"
		}

		if activeNav != "" && activeNav != "/" {
			// calculate the path relative to the currently rendered page
			bToRoot := helper.BackToRoot(activeNav)
			navEl.GoTo = bToRoot + navEl.GoTo
			navEl.GoTo = path.Clean(navEl.GoTo)
		}

		(*curNavMap)[navEl.Navname] = &navEl
		if curNavSlice != nil {
			*curNavSlice = append(*curNavSlice, &navEl)
		}

		buildNavigation(el, &subMap, &subSlice, navActive, activeNav)
	}
}
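
// processContent renders one node of the content tree: it creates the output
// directory, optionally writes a webserver redirect, converts every collected
// markdown file to HTML via blackfriday, renders it through its pongo2
// template, copies the remaining files and finally recurses into the
// subdirectories.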
func processContent(conf *config.PathConfigTree) {
	helper.CreateDirectory(conf.OutputPath)

	curNavPath := strings.TrimPrefix(conf.OutputPath, *outDir)
	curNavPath = strings.TrimPrefix(curNavPath, "/")
	curNavPath = path.Clean(curNavPath)
	if curNavPath == "." {
		curNavPath = ""
	}

	goTo := conf.Config.This.GoTo
	if goTo != nil && *goTo != "" {
		goToFixed := *goTo
		if strings.HasPrefix(goToFixed, "/") {
			goToFixed = helper.BackToRoot(curNavPath) + goToFixed
		}
		goToFixed = path.Clean(goToFixed)

		switch config.Config.Webserver.Type {
		case "apache":
			htaccessFile := conf.OutputPath + "/.htaccess"
			log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goToFixed)
			err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
RewriteRule ^$ %{REQUEST_URI}`+goToFixed+`/ [R,L]
`), 0644)
			if err != nil {
				log.Panicf("could not write '%s': %s", htaccessFile, err)
			}
		}
	}

	for _, file := range conf.InputFiles {
		inFile := conf.InputPath + "/" + file
		log.Debugf("reading file: %s", inFile)

		input, err := ioutil.ReadFile(inFile)
		if err != nil {
			log.Panicf("could not read '%s': %s", inFile, err)
		}
		log.Infof("processing input file '%s'", inFile)

		newConfig := new(config.PathConfig)
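
		// A markdown file may start with a YAML header that overrides the
		// directory config for this single file. Illustrative example only;
		// the exact key names depend on the yaml tags in config.PathConfig:
		//
		//   ---
		//   Template: "article.html"
		//   Data:
		//     title: "My page"
		//   ---
		//   # Markdown content starts here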
		regex := regexp.MustCompile("(?s)^---(.*?)\\r?\\n\\r?---\\r?\\n\\r?")
		yamlData := regex.Find(input)
		if string(yamlData) != "" {
			log.Debugf("found yaml header in '%s', merging config", inFile)
			err = yaml.Unmarshal(yamlData, newConfig)
			if err != nil {
				log.Panicf("could not parse YAML header from '%s': %s", inFile, err)
			}

			log.Debug("merging config with upper config")
			oldThis := newConfig.This
			config.Merge(newConfig, conf.Config)
			newConfig.This = oldThis

			log.Debug(spew.Sdump(newConfig))

			input = regex.ReplaceAll(input, []byte(""))
		} else {
			config.Merge(newConfig, conf.Config)
		}

		// should this file be ignored?
		ignoreFile := false
		var ignoreRegex *string
		var stripRegex *string
		var outputExt *string
		if f := newConfig.Filename; f != nil {
			ignoreRegex = f.Ignore
			stripRegex = f.Strip
			outputExt = f.OutputExtension
		}
		if ignoreRegex != nil && *ignoreRegex != "" {
			regex, err := regexp.Compile(*ignoreRegex)
			if err != nil {
				log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
			}
			ignoreFile = regex.MatchString(file)
		}

		if ignoreFile {
			log.Infof("ignoring file '%s', because of filename.ignore", inFile)
		} else {

			// build output filename
			outputFilename := file

			var indexInputFile *string
			var indexOutputFile *string
			if i := newConfig.Index; i != nil {
				indexInputFile = i.InputFile
				indexOutputFile = i.OutputFile
			}

			if indexInputFile != nil && *indexInputFile == file && indexOutputFile != nil && *indexOutputFile != "" {
				outputFilename = *indexOutputFile
			} else {
				if stripRegex != nil && *stripRegex != "" {
					regex, err := regexp.Compile(*stripRegex)
					if err != nil {
						log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
					}
					outputFilename = regex.ReplaceAllString(outputFilename, "$1")
				}
				if outputExt != nil && *outputExt != "" {
					outputFilename += "." + *outputExt
				}
			}

			outFile := conf.OutputPath + "/" + outputFilename
			log.Debugf("using '%s' as output file", outFile)

			var options []blackfriday.Option

			var chromaRenderer *bool
			var chromaStyle *string
			if m := newConfig.Markdown; m != nil {
				chromaRenderer = m.ChromaRenderer
				chromaStyle = m.ChromaStyle
			}
			if chromaStyle == nil {
				style := "monokai"
				chromaStyle = &style
			}
			if chromaRenderer != nil && *chromaRenderer {
				options = []blackfriday.Option{
					blackfriday.WithRenderer(
						bfchroma.NewRenderer(
							bfchroma.Style(*chromaStyle),
						),
					),
				}
			}

			// remove \r so blackfriday only sees \n line endings
			input = bytes.Replace(input, []byte("\r"), []byte(""), -1)
			html := blackfriday.Run(input, options...)

			// a standalone --- line splits the document into separately rendered parts
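			// Illustrative example: with an input of
			//
			//   Intro text
			//   ---
			//   Second column
			//
			// BodyParts in the template context will contain two rendered HTML
			// fragments, while Body holds the document rendered as a whole.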
			regex := regexp.MustCompile("\\r?\\n\\r?---\\r?\\n\\r?")
			inputParts := regex.Split(string(input), -1)
			htmlParts := make([]*pongo2.Value, 0)
			for _, iPart := range inputParts {
				htmlParts = append(htmlParts, pongo2.AsSafeValue(string(blackfriday.Run([]byte(iPart), options...))))
			}

			// build navigation
			navMap := make(map[string]*navElement)
			navSlice := make([]*navElement, 0)
			navActive := make([]*navElement, 0)
			buildNavigation(contentConfig, &navMap, &navSlice, &navActive, curNavPath)

			// pass the merged config (including the yaml header data) to the template
			ctx := make(map[string]interface{})
			ctx["This"] = newConfig.This
			ctx["Meta"] = newConfig.Meta
			ctx["Data"] = newConfig.Data
			ctx["NavMap"] = navMap
			ctx["NavSlice"] = navSlice
			ctx["NavActive"] = navActive
			ctx["Body"] = pongo2.AsSafeValue(string(html))
			ctx["BodyParts"] = htmlParts
			ctx["AssetsPath"] = config.Config.Assets.ToPath
			ctx["CurrentPath"] = curNavPath

			// register template helper functions
			ctx["fnRequest"] = helper.RequestFn
			ctx["fnRender"] = helper.RenderFn

			log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
			templateFilename := *newConfig.Template
			result, err := helper.RenderTemplate(*newConfig.Template, conf, newConfig, &ctx)
			if err != nil {
				log.Panicf("could not execute template '%s' for input file '%s': %s", templateFilename, inFile, err)
			}

			result = helper.FixAssetsPath(result, curNavPath)

			log.Noticef("writing to output file: %s", outFile)
			err = ioutil.WriteFile(outFile, []byte(result), 0644)
			if err != nil {
				log.Panicf("could not write to output file '%s': %s", outFile, err)
			}

			//fmt.Println(string(html))
		}
	}

	// process other (non-markdown) files, e.g. copy them to the output directory
	for _, file := range conf.OtherFiles {
		switch config.Config.OtherFiles.Action {
		case "copy":
			from := conf.InputPath + "/" + file
			to := conf.OutputPath + "/" + file
			log.Noticef("copying file from '%s' to '%s'", from, to)
			err := cpy.Copy(from, to)
			if err != nil {
				log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
			}
		}
	}

	for _, el := range conf.Sub {
		processContent(el)
	}
}
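
// processAssets handles the site-wide assets directory as configured in the
// global config, currently by copying it from the input into the output tree.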
func processAssets() {
	switch config.Config.Assets.Action {
	case "copy":
		from := config.Config.Assets.FromPath
		to := config.Config.Assets.ToPath
		if !strings.HasPrefix(from, "/") {
			from = *inDir + "/" + from
		}
		if !strings.HasPrefix(to, "/") {
			to = *outDir + "/" + to
		}
		log.Noticef("copying assets from '%s' to '%s'", from, to)
		err := cpy.Copy(from, to)
		if err != nil {
			log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
		}
	}
}
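
// Typical invocation (illustrative; the binary name is assumed from the module path):
//
//	mark2web -in ./mysite -out ./html -create -logLevel info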
func main() {
	inDir = flag.String("in", "./", "input directory")
	outDir = flag.String("out", "html", "output directory")
	createOutDir := flag.Bool("create", false, "create output directory if it does not exist")
	//clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
	logLevel := flag.String("logLevel", "info", "log level: debug, info, warning, error")
	version := flag.Bool("version", false, "print version of this executable")

	flag.Parse()
	if version != nil && *version {
		fmt.Printf(`%11s: %s
%11s: %s
%11s: %s
`, "version", Version, "git hash", GitHash, "build time", BuildTime)
		os.Exit(0)
	}

	level := "info"
	if logLevel != nil {
		level = *logLevel
	}
	helper.ConfigureLogger(level)

	if inDir == nil || *inDir == "" {
		log.Panic("input directory not specified")
	}
	log.Infof("input directory: %s", *inDir)

	if outDir == nil || *outDir == "" {
		log.Panic("output directory not specified")
	}
	log.Infof("output directory: %s", *outDir)

	if createOutDir != nil && *createOutDir {
		if _, err := os.Stat(*outDir); os.IsNotExist(err) {
			log.Debugf("output directory '%s' does not exist", *outDir)
			log.Debugf("trying to create output directory: %s", *outDir)
			err := os.MkdirAll(*outDir, 0755)
			if err != nil {
				log.Panic(err)
			}
			log.Noticef("created output directory: %s", *outDir)
		} else {
			log.Noticef("output directory '%s' already exists", *outDir)
		}
	}

	if fD, err := os.Stat(*outDir); os.IsNotExist(err) {
		log.Panicf("output directory '%s' does not exist, try the -create parameter or create it manually", *outDir)
	} else {
		if fD == nil {
			log.Panicf("something went wrong, could not get file handle for output dir %s", *outDir)
		} else if !fD.IsDir() {
			log.Panicf("output directory '%s' is not a directory", *outDir)
		}
	}

	log.Debug("reading global config...")
	configFilename := *inDir + "/config.yml"
	err := config.ReadGlobalConfig(configFilename)
	if err != nil {
		log.Panicf("could not read file '%s': %s", configFilename, err)
	}

	log.Debugf("reading input directory %s", *inDir)

	defaultTemplate := "base.html"
	defaultInputFile := "README.md"
	defaultOutputFile := "index.html"
	defaultPathStrip := "^[0-9]*_(.*)"
	defaultPathIgnoreForNav := "^_"
	defaultFilenameStrip := "(.*).md$"
	defaultFilenameIgnore := "^_"
	defaultFilenameOutputExtension := "html"

	defaultPathConfig := new(config.PathConfig)
	defaultPathConfig.Template = &defaultTemplate
	defaultPathConfig.Index = &config.IndexConfig{
		InputFile:  &defaultInputFile,
		OutputFile: &defaultOutputFile,
	}
	defaultPathConfig.Path = &config.DirnameConfig{
		Strip:        &defaultPathStrip,
		IgnoreForNav: &defaultPathIgnoreForNav,
	}
	defaultPathConfig.Filename = &config.FilenameConfig{
		Strip:           &defaultFilenameStrip,
		Ignore:          &defaultFilenameIgnore,
		OutputExtension: &defaultFilenameOutputExtension,
	}

	readContentDir(*inDir+"/content", *outDir, "", defaultPathConfig, contentConfig)
	//spew.Dump(contentConfig)

	//spew.Dump(navMap)

	helper.SetTemplateDir(*inDir + "/templates")
	processContent(contentConfig)

	processAssets()
}