Compare commits
54 Commits
Author | SHA1 | Date | |
---|---|---|---|
e4c72b348f | |||
08652832ed | |||
0683e327c9 | |||
fc4eb98b69 | |||
f2ef597883 | |||
5cdc4203b7 | |||
a781485c0a | |||
58ffd68822 | |||
956523b4b0 | |||
1f54fd0373 | |||
e491a16cc2 | |||
9e6f023180 | |||
6275c695f4 | |||
f618eb28e9 | |||
d48f9cd6ca | |||
|
93ea8b8b1b | ||
|
daed37587e | ||
|
7695f42e20 | ||
|
5d6d03702e | ||
|
4cb09fb81f | ||
|
234137f22f | ||
|
267d1010bb | ||
|
740fb94556 | ||
|
a17926f54b | ||
|
b9c4553577 | ||
|
745c886cec | ||
|
ff1da084af | ||
|
4a9a3eec06 | ||
|
3573e23212 | ||
|
1312dcecb5 | ||
|
f8f40b2134 | ||
|
58681bd7df | ||
|
7df4a03171 | ||
|
5624c7af87 | ||
|
23fd5fe1d4 | ||
|
50139c6f51 | ||
|
c5fd151060 | ||
|
90a39e3027 | ||
|
9d855f586d | ||
|
946f586ccb | ||
|
cd8c7fa657 | ||
|
3e3d1f05a0 | ||
|
5cc4b9d001 | ||
|
3c87da15e1 | ||
|
baa38b668e | ||
|
d652afd633 | ||
|
ada333a0e1 | ||
|
0dfe0f8142 | ||
|
70d7497eda | ||
|
dfc932b7b0 | ||
|
938e597f3f | ||
|
29f01a2618 | ||
|
66a9ebe452 | ||
|
b2e0d78a2c |
36
.air.toml
Normal file
36
.air.toml
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
root = "."
|
||||||
|
testdata_dir = "testdata"
|
||||||
|
tmp_dir = "tmp"
|
||||||
|
|
||||||
|
[build]
|
||||||
|
bin = "./tmp/main"
|
||||||
|
cmd = "go build -o ./tmp/main ./cmd/mark2web"
|
||||||
|
delay = 1000
|
||||||
|
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
|
||||||
|
exclude_file = []
|
||||||
|
exclude_regex = ["_test.go"]
|
||||||
|
exclude_unchanged = false
|
||||||
|
follow_symlink = false
|
||||||
|
full_bin = ""
|
||||||
|
include_dir = []
|
||||||
|
include_ext = ["go", "tpl", "tmpl", "html"]
|
||||||
|
kill_delay = "0s"
|
||||||
|
log = "build-errors.log"
|
||||||
|
send_interrupt = false
|
||||||
|
stop_on_error = true
|
||||||
|
|
||||||
|
[color]
|
||||||
|
app = ""
|
||||||
|
build = "yellow"
|
||||||
|
main = "magenta"
|
||||||
|
runner = "green"
|
||||||
|
watcher = "cyan"
|
||||||
|
|
||||||
|
[log]
|
||||||
|
time = false
|
||||||
|
|
||||||
|
[misc]
|
||||||
|
clean_on_exit = false
|
||||||
|
|
||||||
|
[screen]
|
||||||
|
clear_on_rebuild = false
|
9
.gitignore
vendored
9
.gitignore
vendored
@ -1,3 +1,6 @@
|
|||||||
test.html
|
/html/
|
||||||
html/
|
/build/dist
|
||||||
mark2web
|
/coverage.out
|
||||||
|
/test/out
|
||||||
|
/mark2web
|
||||||
|
/tmp/
|
69
.gitmodules
vendored
69
.gitmodules
vendored
@ -1,69 +0,0 @@
|
|||||||
[submodule "vendor/gopkg.in/russross/blackfriday.v2"]
|
|
||||||
path = vendor/gopkg.in/russross/blackfriday.v2
|
|
||||||
url = https://gopkg.in/russross/blackfriday.v2
|
|
||||||
[submodule "vendor/github.com/shurcooL/sanitized_anchor_name"]
|
|
||||||
path = vendor/github.com/shurcooL/sanitized_anchor_name
|
|
||||||
url = https://github.com/shurcooL/sanitized_anchor_name
|
|
||||||
[submodule "vendor/github.com/Depado/bfchroma"]
|
|
||||||
path = vendor/github.com/Depado/bfchroma
|
|
||||||
url = https://github.com/Depado/bfchroma
|
|
||||||
[submodule "vendor/github.com/alecthomas/chroma"]
|
|
||||||
path = vendor/github.com/alecthomas/chroma
|
|
||||||
url = https://github.com/alecthomas/chroma
|
|
||||||
[submodule "vendor/github.com/dlclark/regexp2"]
|
|
||||||
path = vendor/github.com/dlclark/regexp2
|
|
||||||
url = https://github.com/dlclark/regexp2
|
|
||||||
[submodule "vendor/github.com/danwakefield/fnmatch"]
|
|
||||||
path = vendor/github.com/danwakefield/fnmatch
|
|
||||||
url = https://github.com/danwakefield/fnmatch
|
|
||||||
[submodule "vendor/github.com/op/go-logging"]
|
|
||||||
path = vendor/github.com/op/go-logging
|
|
||||||
url = https://github.com/op/go-logging
|
|
||||||
[submodule "vendor/gopkg.in/yaml.v2"]
|
|
||||||
path = vendor/gopkg.in/yaml.v2
|
|
||||||
url = https://gopkg.in/yaml.v2
|
|
||||||
[submodule "vendor/github.com/davecgh/go-spew"]
|
|
||||||
path = vendor/github.com/davecgh/go-spew
|
|
||||||
url = https://github.com/davecgh/go-spew
|
|
||||||
[submodule "vendor/github.com/imdario/mergo"]
|
|
||||||
path = vendor/github.com/imdario/mergo
|
|
||||||
url = https://github.com/imdario/mergo
|
|
||||||
[submodule "vendor/github.com/otiai10/copy"]
|
|
||||||
path = vendor/github.com/otiai10/copy
|
|
||||||
url = https://github.com/otiai10/copy
|
|
||||||
[submodule "vendor/github.com/flosch/pongo2"]
|
|
||||||
path = vendor/github.com/flosch/pongo2
|
|
||||||
url = https://github.com/flosch/pongo2
|
|
||||||
[submodule "vendor/github.com/juju/errors"]
|
|
||||||
path = vendor/github.com/juju/errors
|
|
||||||
url = https://github.com/juju/errors
|
|
||||||
[submodule "vendor/github.com/flosch/pongo2-addons"]
|
|
||||||
path = vendor/github.com/flosch/pongo2-addons
|
|
||||||
url = https://github.com/flosch/pongo2-addons
|
|
||||||
[submodule "vendor/github.com/extemporalgenome/slug"]
|
|
||||||
path = vendor/github.com/extemporalgenome/slug
|
|
||||||
url = https://github.com/extemporalgenome/slug
|
|
||||||
[submodule "vendor/golang.org/x/text"]
|
|
||||||
path = vendor/golang.org/x/text
|
|
||||||
url = https://go.googlesource.com/text
|
|
||||||
[submodule "vendor/github.com/flosch/go-humanize"]
|
|
||||||
path = vendor/github.com/flosch/go-humanize
|
|
||||||
url = https://github.com/flosch/go-humanize
|
|
||||||
[submodule "vendor/github.com/russross/blackfriday"]
|
|
||||||
path = vendor/github.com/russross/blackfriday
|
|
||||||
url = https://github.com/russross/blackfriday
|
|
||||||
[submodule "vendor/github.com/robertkrimen/otto"]
|
|
||||||
path = vendor/github.com/robertkrimen/otto
|
|
||||||
url = https://github.com/robertkrimen/otto
|
|
||||||
[submodule "vendor/gopkg.in/sourcemap.v1"]
|
|
||||||
path = vendor/gopkg.in/sourcemap.v1
|
|
||||||
url = https://gopkg.in/sourcemap.v1
|
|
||||||
[submodule "vendor/github.com/ddliu/motto"]
|
|
||||||
path = vendor/github.com/ddliu/motto
|
|
||||||
url = https://github.com/ddliu/motto
|
|
||||||
[submodule "vendor/github.com/disintegration/imaging"]
|
|
||||||
path = vendor/github.com/disintegration/imaging
|
|
||||||
url = https://github.com/disintegration/imaging
|
|
||||||
[submodule "vendor/golang.org/x/image"]
|
|
||||||
path = vendor/golang.org/x/image
|
|
||||||
url = https://go.googlesource.com/image
|
|
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@ -6,7 +6,7 @@
|
|||||||
"commands": [
|
"commands": [
|
||||||
{
|
{
|
||||||
"match": "website/.*",
|
"match": "website/.*",
|
||||||
"cmd": "time mark2web -in ${workspaceRoot}/website -out ${workspaceRoot}/html -create",
|
"cmd": "time mark2web -in ${workspaceRoot}/website -out ${workspaceRoot}/html -create -logLevel warning -progress",
|
||||||
"silent": false
|
"silent": false
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
26
LICENSE
Normal file
26
LICENSE
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
Copyright (c) 2019, Sebastian Frank
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer.
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||||
|
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
The views and conclusions contained in the software and documentation are those
|
||||||
|
of the authors and should not be interpreted as representing official policies,
|
||||||
|
either expressed or implied, of the mark2web project.
|
@ -1,5 +1,7 @@
|
|||||||
# mark2web
|
# mark2web
|
||||||
|
|
||||||
|
[![Build Status](https://ci.basehosts.de/api/badges/apairon/mark2web/status.svg)](https://ci.basehosts.de/apairon/mark2web)
|
||||||
|
|
||||||
mark2web ist ein Website-Generator, der als Eingabe Markdown-Dateien, Templates und Konfigurations-Dateien nutzt.
|
mark2web ist ein Website-Generator, der als Eingabe Markdown-Dateien, Templates und Konfigurations-Dateien nutzt.
|
||||||
|
|
||||||
Die vorgesehene Arbeitsweise ist die Pflege der Inhalte über eine Versionsverwaltung (z.B. git) und anschließende CI/CD-Pipeline, welche den Generator aufruft und die fertige Website publiziert.
|
Die vorgesehene Arbeitsweise ist die Pflege der Inhalte über eine Versionsverwaltung (z.B. git) und anschließende CI/CD-Pipeline, welche den Generator aufruft und die fertige Website publiziert.
|
||||||
@ -8,4 +10,8 @@ Die vorgesehene Arbeitsweise ist die Pflege der Inhalte über eine Versionsverwa
|
|||||||
|
|
||||||
Die Dokumentation ist auf der [mark2web-Website](https://www.mark2web.de/) zu finden. Außerdem ist die Dokumentation im Verzeichnis `website/content` dieses Repositories, da dies das Ausgangsmaterial der Projekt-Website ist.
|
Die Dokumentation ist auf der [mark2web-Website](https://www.mark2web.de/) zu finden. Außerdem ist die Dokumentation im Verzeichnis `website/content` dieses Repositories, da dies das Ausgangsmaterial der Projekt-Website ist.
|
||||||
|
|
||||||
Die öffentliche Website ist mit **mark2web** generiert.
|
Die öffentliche Website ist mit **mark2web** generiert.
|
||||||
|
|
||||||
|
## Lizenz
|
||||||
|
|
||||||
|
Das Projekt **mark2web** unterliegt der Lizenz "Simplified BSD License" (siehe [LICENSE](LICENSE)).
|
@ -1,3 +0,0 @@
|
|||||||
NEUERUNGEN:
|
|
||||||
|
|
||||||
- `t=ZIEL_VERZEICHNIS` Parameter im `image_process` Filter
|
|
4
build.sh
4
build.sh
@ -1,4 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
mkdir -p dist
|
|
||||||
go build -v -ldflags "-X main.Version=`git describe --tags --long` -X main.GitHash=`git rev-parse HEAD` -X main.BuildTime=`date -u '+%Y-%m-%d_%I:%M:%S%p'`" -o dist/mark2web-`cat VERSION`-${GOOS}-${GOARCH}${FILEEXT}
|
|
4
build/RELEASE.md
Normal file
4
build/RELEASE.md
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
NEUERUNGEN:
|
||||||
|
|
||||||
|
- pongo2 v4
|
||||||
|
- natives brotli Package ohne cgo
|
1
build/VERSION
Normal file
1
build/VERSION
Normal file
@ -0,0 +1 @@
|
|||||||
|
1.2.1
|
@ -1,4 +1,5 @@
|
|||||||
kind: pipeline
|
kind: pipeline
|
||||||
|
type: docker
|
||||||
name: default
|
name: default
|
||||||
|
|
||||||
workspace:
|
workspace:
|
||||||
@ -6,24 +7,43 @@ workspace:
|
|||||||
path: src/gitbase.de/apairon/mark2web
|
path: src/gitbase.de/apairon/mark2web
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: build for linux
|
- name: init submodules
|
||||||
|
image: docker:git
|
||||||
|
commands:
|
||||||
|
#- git submodule update --init --recursive
|
||||||
|
- git fetch --tags
|
||||||
|
when:
|
||||||
|
event: [ push, tag ]
|
||||||
|
|
||||||
|
- name: test
|
||||||
image: golang:latest
|
image: golang:latest
|
||||||
environment:
|
environment:
|
||||||
CGO_ENABLED: 0
|
CGO_ENABLED: 1
|
||||||
GOOS: linux
|
GOOS: linux
|
||||||
GOARCH: amd64
|
GOARCH: amd64
|
||||||
commands:
|
commands:
|
||||||
- git submodule update --init --recursive
|
# fake term for goconvey color output
|
||||||
- git fetch --tags
|
- env TERM=xterm-color256 go test -v -coverprofile coverage.out ./pkg/*
|
||||||
- ./build.sh
|
when:
|
||||||
|
event: [ push, tag ]
|
||||||
|
|
||||||
|
- name: build for linux
|
||||||
|
image: golang:latest
|
||||||
|
environment:
|
||||||
|
CGO_ENABLED: 1
|
||||||
|
GOOS: linux
|
||||||
|
GOARCH: amd64
|
||||||
|
commands:
|
||||||
|
- scripts/build.sh
|
||||||
when:
|
when:
|
||||||
event: [ push, tag ]
|
event: [ push, tag ]
|
||||||
|
|
||||||
- name: test with example content
|
- name: test with example content
|
||||||
image: alpine
|
image: alpine
|
||||||
commands:
|
commands:
|
||||||
- ./dist/mark2web-`cat VERSION`-linux-amd64 -version
|
- apk add --no-cache libc6-compat
|
||||||
- ./dist/mark2web-`cat VERSION`-linux-amd64 -in example -out example_out -create -logLevel debug
|
- dist/mark2web-`cat build/VERSION`-linux-amd64 -version
|
||||||
|
- dist/mark2web-`cat build/VERSION`-linux-amd64 -in example -out example_out -create -logLevel debug
|
||||||
when:
|
when:
|
||||||
event: [ push, tag ]
|
event: [ push, tag ]
|
||||||
|
|
||||||
@ -34,7 +54,7 @@ steps:
|
|||||||
GOOS: freebsd
|
GOOS: freebsd
|
||||||
GOARCH: amd64
|
GOARCH: amd64
|
||||||
commands:
|
commands:
|
||||||
- ./build.sh
|
- scripts/build.sh
|
||||||
when:
|
when:
|
||||||
event: [ tag ]
|
event: [ tag ]
|
||||||
|
|
||||||
@ -45,7 +65,7 @@ steps:
|
|||||||
GOOS: darwin
|
GOOS: darwin
|
||||||
GOARCH: amd64
|
GOARCH: amd64
|
||||||
commands:
|
commands:
|
||||||
- ./build.sh
|
- scripts/build.sh
|
||||||
when:
|
when:
|
||||||
event: [ tag ]
|
event: [ tag ]
|
||||||
|
|
||||||
@ -57,15 +77,15 @@ steps:
|
|||||||
GOARCH: amd64
|
GOARCH: amd64
|
||||||
FILEEXT: .exe
|
FILEEXT: .exe
|
||||||
commands:
|
commands:
|
||||||
- ./build.sh
|
- scripts/build.sh
|
||||||
when:
|
when:
|
||||||
event: [ tag ]
|
event: [ tag ]
|
||||||
|
|
||||||
- name: build docker image
|
- name: build docker image
|
||||||
image: docker
|
image: docker
|
||||||
commands:
|
commands:
|
||||||
- cp dist/mark2web-`cat VERSION`-linux-amd64 mark2web
|
- cp dist/mark2web-`cat build/VERSION`-linux-amd64 build/package/mark2web
|
||||||
- docker build -t apairon/mark2web .
|
- docker build -t apairon/mark2web build/package
|
||||||
volumes:
|
volumes:
|
||||||
- name: docker
|
- name: docker
|
||||||
path: /var/run/docker.sock
|
path: /var/run/docker.sock
|
||||||
@ -73,6 +93,17 @@ steps:
|
|||||||
branch: [ master ]
|
branch: [ master ]
|
||||||
event: [ push ]
|
event: [ push ]
|
||||||
|
|
||||||
|
- name: build website
|
||||||
|
image: apairon/mark2web:latest
|
||||||
|
pull: never
|
||||||
|
commands:
|
||||||
|
- /mark2web -version
|
||||||
|
- /mark2web -in website -out html -create -logLevel info
|
||||||
|
when:
|
||||||
|
branch: [ master ]
|
||||||
|
event: [ promote, push ]
|
||||||
|
target: [ "", website ]
|
||||||
|
|
||||||
- name: deploy website
|
- name: deploy website
|
||||||
image: apairon/mark2web:latest
|
image: apairon/mark2web:latest
|
||||||
pull: never
|
pull: never
|
||||||
@ -80,12 +111,10 @@ steps:
|
|||||||
RSYNC_PASS:
|
RSYNC_PASS:
|
||||||
from_secret: rsync_pass
|
from_secret: rsync_pass
|
||||||
commands:
|
commands:
|
||||||
- /mark2web -version
|
|
||||||
- /mark2web -in website -out html -create -logLevel info
|
|
||||||
- '
|
- '
|
||||||
rsync -rlcgD -i -u -v --stats
|
rsync -rlcgD -i -u -v --stats
|
||||||
--delete
|
--delete
|
||||||
-e "sshpass -p $${RSYNC_PASS} ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 22222"
|
-e "sshpass -p $${RSYNC_PASS} ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o HostKeyAlgorithms=ssh-rsa -o PubkeyAcceptedKeyTypes=ssh-rsa -p 22222"
|
||||||
html/
|
html/
|
||||||
basiskonfiguration_mark2web_rsync@deploy.bc1.basehosts.de:./'
|
basiskonfiguration_mark2web_rsync@deploy.bc1.basehosts.de:./'
|
||||||
when:
|
when:
|
||||||
@ -110,8 +139,8 @@ steps:
|
|||||||
base_url: https://gitbase.de
|
base_url: https://gitbase.de
|
||||||
files:
|
files:
|
||||||
- dist/*
|
- dist/*
|
||||||
title: VERSION
|
title: build/VERSION
|
||||||
note: RELEASE.md
|
note: build/RELEASE.md
|
||||||
checksum:
|
checksum:
|
||||||
- md5
|
- md5
|
||||||
- sha256
|
- sha256
|
@ -1,4 +1,4 @@
|
|||||||
FROM alpine
|
FROM alpine
|
||||||
RUN apk update && apk add ca-certificates openssh-client sshpass rsync gzip && rm -r /var/cache/
|
RUN apk update && apk add ca-certificates openssh-client sshpass rsync gzip libc6-compat bash && rm -r /var/cache/
|
||||||
ADD mark2web /
|
ADD mark2web /
|
||||||
CMD ["/mark2web"]
|
CMD ["/mark2web"]
|
@ -6,8 +6,10 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
"gitbase.de/apairon/mark2web/pkg/filter"
|
||||||
"gitbase.de/apairon/mark2web/helper"
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/progress"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@ -19,16 +21,13 @@ var (
|
|||||||
BuildTime = "UNKNOWN"
|
BuildTime = "UNKNOWN"
|
||||||
)
|
)
|
||||||
|
|
||||||
var log = helper.Log
|
|
||||||
|
|
||||||
var contentConfig = new(config.PathConfigTree)
|
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
inDir := flag.String("in", "./", "input directory")
|
inDir := flag.String("in", "./", "input directory")
|
||||||
outDir := flag.String("out", "html", "output directory")
|
outDir := flag.String("out", "html", "output directory")
|
||||||
createOutDir := flag.Bool("create", false, "create output directory if not existing")
|
createOutDir := flag.Bool("create", false, "create output directory if not existing")
|
||||||
//clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
|
//clearOutDir := flag.Bool("clear", false, "clear output directory before generating website")
|
||||||
logLevel := flag.String("logLevel", "info", "log level: debug, info, warning, error")
|
logLevel := flag.String("logLevel", "", "log level: debug, info, notice, warning, error")
|
||||||
|
progressBars := flag.Bool("progress", false, "show progress bars for jobs")
|
||||||
version := flag.Bool("version", false, "print version of this executable")
|
version := flag.Bool("version", false, "print version of this executable")
|
||||||
|
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
@ -40,60 +39,66 @@ func main() {
|
|||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
level := "info"
|
level := "notice"
|
||||||
if logLevel != nil {
|
|
||||||
|
if progressBars != nil && *progressBars {
|
||||||
|
progress.Start()
|
||||||
|
level = "warning" // disable log for progressbars
|
||||||
|
}
|
||||||
|
|
||||||
|
if logLevel != nil && *logLevel != "" {
|
||||||
level = *logLevel
|
level = *logLevel
|
||||||
}
|
}
|
||||||
helper.ConfigureLogger(level)
|
logger.SetLogLevel(level)
|
||||||
|
|
||||||
if inDir == nil || *inDir == "" {
|
if inDir == nil || *inDir == "" {
|
||||||
log.Panic("input directory not specified")
|
logger.Exit("input directory not specified")
|
||||||
}
|
}
|
||||||
iDir := path.Clean(*inDir)
|
iDir := path.Clean(*inDir)
|
||||||
inDir = &iDir
|
inDir = &iDir
|
||||||
log.Infof("input directory: %s", *inDir)
|
logger.I("input directory: %s", *inDir)
|
||||||
|
|
||||||
if outDir == nil || *outDir == "" {
|
if outDir == nil || *outDir == "" {
|
||||||
log.Panic("output directory not specified")
|
logger.Exit("output directory not specified")
|
||||||
}
|
}
|
||||||
oDir := path.Clean(*outDir)
|
oDir := path.Clean(*outDir)
|
||||||
outDir = &oDir
|
outDir = &oDir
|
||||||
log.Infof("output directory: %s", *outDir)
|
logger.I("output directory: %s", *outDir)
|
||||||
|
|
||||||
if createOutDir != nil && *createOutDir {
|
if createOutDir != nil && *createOutDir {
|
||||||
if _, err := os.Stat(*outDir); os.IsNotExist(err) {
|
if _, err := os.Stat(*outDir); os.IsNotExist(err) {
|
||||||
log.Debugf("output directory '%s' does not exist", *outDir)
|
logger.D("output directory '%s' does not exist", *outDir)
|
||||||
log.Debugf("trying to create output directory: %s", *outDir)
|
logger.D("trying to create output directory: %s", *outDir)
|
||||||
err := os.MkdirAll(*outDir, 0755)
|
err := os.MkdirAll(*outDir, 0755)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Panic(err)
|
logger.Log.Panic(err)
|
||||||
}
|
}
|
||||||
log.Noticef("created output directory: %s", *outDir)
|
logger.I("created output directory: %s", *outDir)
|
||||||
} else {
|
} else {
|
||||||
log.Noticef("output directory '%s' already exists", *outDir)
|
logger.I("output directory '%s' already exists", *outDir)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if fD, err := os.Stat(*outDir); os.IsNotExist(err) {
|
if fD, err := os.Stat(*outDir); os.IsNotExist(err) {
|
||||||
log.Panicf("output directory '%s' does not exist, try -create parameter or create manually", *outDir)
|
logger.Eexit(err, "output directory '%s' does not exist, try -create parameter or create manually", *outDir)
|
||||||
} else {
|
} else {
|
||||||
if fD == nil {
|
if fD == nil {
|
||||||
log.Panicf("something went wrong, could not get file handle for output dir %s", *outDir)
|
logger.P("something went wrong, could not get file handle for output dir %s", *outDir)
|
||||||
} else if !fD.IsDir() {
|
} else if !fD.IsDir() {
|
||||||
log.Panicf("output directory '%s' is not a directory", *outDir)
|
logger.Exit("output directory '%s' is not a directory", *outDir)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Debug("reading global config...")
|
logger.D("reading global config...")
|
||||||
configFilename := *inDir + "/config.yml"
|
configFilename := *inDir + "/config.yml"
|
||||||
err := config.ReadGlobalConfig(configFilename)
|
err := mark2web.Config.ReadFromFile(configFilename)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Panicf("could not read file '%s': %s", configFilename, err)
|
logger.Eexit(err, "could not read file '%s'", configFilename)
|
||||||
}
|
}
|
||||||
config.Config.Directories.Input = *inDir
|
mark2web.Config.Directories.Input = *inDir
|
||||||
config.Config.Directories.Output = *outDir
|
mark2web.Config.Directories.Output = *outDir
|
||||||
|
|
||||||
log.Debugf("reading input directory %s", *inDir)
|
logger.D("reading input directory %s", *inDir)
|
||||||
|
|
||||||
defaultTemplate := "base.html"
|
defaultTemplate := "base.html"
|
||||||
defaultInputFile := "README.md"
|
defaultInputFile := "README.md"
|
||||||
@ -104,40 +109,35 @@ func main() {
|
|||||||
defaultFilenameIgnore := "^_"
|
defaultFilenameIgnore := "^_"
|
||||||
defaultFilenameOutputExtension := "html"
|
defaultFilenameOutputExtension := "html"
|
||||||
|
|
||||||
defaultPathConfig := new(config.PathConfig)
|
defaultPathConfig := new(mark2web.PathConfig)
|
||||||
defaultPathConfig.Template = &defaultTemplate
|
defaultPathConfig.Template = &defaultTemplate
|
||||||
defaultPathConfig.Index = &config.IndexConfig{
|
defaultPathConfig.Index = &mark2web.IndexConfig{
|
||||||
InputFile: &defaultInputFile,
|
InputFile: &defaultInputFile,
|
||||||
OutputFile: &defaultOutputFile,
|
OutputFile: &defaultOutputFile,
|
||||||
}
|
}
|
||||||
defaultPathConfig.Path = &config.DirnameConfig{
|
defaultPathConfig.Path = &mark2web.DirnameConfig{
|
||||||
Strip: &defaultPathStrip,
|
Strip: &defaultPathStrip,
|
||||||
IgnoreForNav: &defaultPathIgnoreForNav,
|
IgnoreForNav: &defaultPathIgnoreForNav,
|
||||||
}
|
}
|
||||||
defaultPathConfig.Filename = &config.FilenameConfig{
|
defaultPathConfig.Filename = &mark2web.FilenameConfig{
|
||||||
Strip: &defaultFilenameStrip,
|
Strip: &defaultFilenameStrip,
|
||||||
Ignore: &defaultFilenameIgnore,
|
Ignore: &defaultFilenameIgnore,
|
||||||
OutputExtension: &defaultFilenameOutputExtension,
|
OutputExtension: &defaultFilenameOutputExtension,
|
||||||
}
|
}
|
||||||
defaultPathConfig.Imaging = &config.ImagingConfig{
|
defaultPathConfig.Imaging = &mark2web.ImagingConfig{
|
||||||
Width: 1920,
|
Width: 1920,
|
||||||
Height: 1920,
|
Height: 1920,
|
||||||
Process: "fit",
|
Process: "fit",
|
||||||
Quality: 75,
|
Quality: 75,
|
||||||
}
|
}
|
||||||
|
|
||||||
helper.ReadContentDir(*inDir+"/content", *outDir, "", defaultPathConfig, contentConfig)
|
filtersDir := *inDir + "/templates/filters"
|
||||||
//spew.Dump(contentConfig)
|
|
||||||
|
|
||||||
//spew.Dump(navMap)
|
|
||||||
|
|
||||||
templatesDir := *inDir + "/templates"
|
|
||||||
helper.SetTemplateDir(templatesDir)
|
|
||||||
filtersDir := templatesDir + "/filters"
|
|
||||||
if _, err := os.Stat(filtersDir); !os.IsNotExist(err) {
|
if _, err := os.Stat(filtersDir); !os.IsNotExist(err) {
|
||||||
helper.RegisterFilters(filtersDir)
|
filter.RegisterFilters(filtersDir)
|
||||||
}
|
}
|
||||||
helper.ProcessContent(contentConfig, contentConfig)
|
|
||||||
|
|
||||||
helper.ProcessAssets()
|
mark2web.Run(*inDir, *outDir, defaultPathConfig)
|
||||||
|
|
||||||
|
logger.N("done")
|
||||||
|
|
||||||
}
|
}
|
@ -1,2 +1,2 @@
|
|||||||
This:
|
This:
|
||||||
GoTo: main/home
|
GoTo: main/home/
|
@ -1,2 +1,2 @@
|
|||||||
This:
|
This:
|
||||||
GoTo: main/home
|
GoTo: main/home/
|
31
go.mod
Normal file
31
go.mod
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
module gitbase.de/apairon/mark2web
|
||||||
|
|
||||||
|
go 1.16
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/Depado/bfchroma v1.3.0
|
||||||
|
github.com/alecthomas/chroma v0.10.0 // indirect
|
||||||
|
github.com/andybalholm/brotli v1.0.4
|
||||||
|
github.com/davecgh/go-spew v1.1.1
|
||||||
|
github.com/ddliu/motto v0.3.1
|
||||||
|
github.com/disintegration/imaging v1.6.2
|
||||||
|
github.com/extemporalgenome/slug v0.0.0-20150414033109-0320c85e32e0
|
||||||
|
github.com/flosch/pongo2 v0.0.0-20200913210552-0d938eb266f3 // indirect
|
||||||
|
github.com/flosch/pongo2-addons v0.0.0-20210526150811-f969446c5b72
|
||||||
|
github.com/flosch/pongo2/v4 v4.0.2
|
||||||
|
github.com/gosuri/uilive v0.0.4 // indirect
|
||||||
|
github.com/gosuri/uiprogress v0.0.1
|
||||||
|
github.com/imdario/mergo v0.3.12
|
||||||
|
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||||
|
github.com/mattn/go-tty v0.0.4
|
||||||
|
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
|
||||||
|
github.com/otiai10/copy v1.7.0
|
||||||
|
github.com/robertkrimen/otto v0.0.0-20211024170158-b87d35c0b86f
|
||||||
|
github.com/russross/blackfriday/v2 v2.1.0
|
||||||
|
github.com/smartystreets/goconvey v1.7.2
|
||||||
|
golang.org/x/image v0.0.0-20211028202545-6944b10bf410 // indirect
|
||||||
|
golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 // indirect
|
||||||
|
golang.org/x/text v0.3.7 // indirect
|
||||||
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
|
||||||
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
|
)
|
128
go.sum
Normal file
128
go.sum
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
github.com/Depado/bfchroma v1.3.0 h1:zz14vpvySU6S0CL6yGPr1vkFevQecIt8dJdCsMS2JpM=
|
||||||
|
github.com/Depado/bfchroma v1.3.0/go.mod h1:c0bFk0tFmT+clD3TIGurjWCfD/QV8/EebfM3JGr+98M=
|
||||||
|
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||||
|
github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
||||||
|
github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek=
|
||||||
|
github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s=
|
||||||
|
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||||
|
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
||||||
|
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||||
|
github.com/alecthomas/repr v0.0.0-20200325044227-4184120f674c/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||||
|
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
|
||||||
|
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||||
|
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||||
|
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||||
|
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/ddliu/motto v0.3.1 h1:k2uMOMy/LGA1okqJhtuq0ajHhYEIr798qlBULt+1kWs=
|
||||||
|
github.com/ddliu/motto v0.3.1/go.mod h1:jhu/Dn9mRcDsZNeb2rCLApdM8OoTFV77Ti2DVQx1ltE=
|
||||||
|
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
|
||||||
|
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
|
||||||
|
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
|
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
||||||
|
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
|
github.com/extemporalgenome/slug v0.0.0-20150414033109-0320c85e32e0 h1:0A9+8DBvlpto0mr+SD1NadV5liSIAZkWnvyshwk88Bc=
|
||||||
|
github.com/extemporalgenome/slug v0.0.0-20150414033109-0320c85e32e0/go.mod h1:96eSBMO0aE2dcsEygXzIsvGyOf7bM5kWuqVCPEgwLEI=
|
||||||
|
github.com/flosch/go-humanize v0.0.0-20140728123800-3ba51eabe506 h1:tN043XK9BV76qc31Z2GACIO5Dsh99q21JtYmR2ltXBg=
|
||||||
|
github.com/flosch/go-humanize v0.0.0-20140728123800-3ba51eabe506/go.mod h1:pSiPkAThBLWmIzJ2fukUGkcxxWR4HoLT7Bp8/krrl5g=
|
||||||
|
github.com/flosch/pongo2 v0.0.0-20200529170236-5abacdfa4915/go.mod h1:fB4mx6dzqFinCxIf3a7Mf5yLk+18Bia9mPAnuejcvDA=
|
||||||
|
github.com/flosch/pongo2 v0.0.0-20200913210552-0d938eb266f3 h1:fmFk0Wt3bBxxwZnu48jqMdaOR/IZ4vdtJFuaFV8MpIE=
|
||||||
|
github.com/flosch/pongo2 v0.0.0-20200913210552-0d938eb266f3/go.mod h1:bJWSKrZyQvfTnb2OudyUjurSG4/edverV7n82+K3JiM=
|
||||||
|
github.com/flosch/pongo2-addons v0.0.0-20210526150811-f969446c5b72 h1:/P0QfDoOIxqUYm8SPcn1XOVczfMbMbo2irMEF3F/sB0=
|
||||||
|
github.com/flosch/pongo2-addons v0.0.0-20210526150811-f969446c5b72/go.mod h1:FRpCGvVuFk51gQzekI5CiH/J6Ir1U47H8nF23gsFDKU=
|
||||||
|
github.com/flosch/pongo2/v4 v4.0.2 h1:gv+5Pe3vaSVmiJvh/BZa82b7/00YUGm0PIyVVLop0Hw=
|
||||||
|
github.com/flosch/pongo2/v4 v4.0.2/go.mod h1:B5ObFANs/36VwxxlgKpdchIJHMvHB562PW+BWPhwZD8=
|
||||||
|
github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||||
|
github.com/gosuri/uilive v0.0.4 h1:hUEBpQDj8D8jXgtCdBu7sWsy5sbW/5GhuO8KBwJ2jyY=
|
||||||
|
github.com/gosuri/uilive v0.0.4/go.mod h1:V/epo5LjjlDE5RJUcqx8dbw+zc93y5Ya3yg8tfZ74VI=
|
||||||
|
github.com/gosuri/uiprogress v0.0.1 h1:0kpv/XY/qTmFWl/SkaJykZXrBBzwwadmW8fRb7RJSxw=
|
||||||
|
github.com/gosuri/uiprogress v0.0.1/go.mod h1:C1RTYn4Sc7iEyf6j8ft5dyoZ4212h8G1ol9QQluh5+0=
|
||||||
|
github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
|
||||||
|
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||||
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
|
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
|
||||||
|
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||||
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
|
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||||
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
|
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||||
|
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||||
|
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||||
|
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
|
||||||
|
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||||
|
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||||
|
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||||
|
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||||
|
github.com/mattn/go-tty v0.0.4 h1:NVikla9X8MN0SQAqCYzpGyXv0jY7MNl3HOWD2dkle7E=
|
||||||
|
github.com/mattn/go-tty v0.0.4/go.mod h1:u5GGXBtZU6RQoKV8gY5W6UhMudbR5vXnUe7j3pxse28=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
|
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=
|
||||||
|
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
|
||||||
|
github.com/otiai10/copy v1.7.0 h1:hVoPiN+t+7d2nzzwMiDHPSOogsWAStewq3TwU05+clE=
|
||||||
|
github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U=
|
||||||
|
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
|
||||||
|
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
|
||||||
|
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
|
||||||
|
github.com/otiai10/mint v1.3.3 h1:7JgpsBaN0uMkyju4tbYHu0mnM55hNKVYLsXmwr15NQI=
|
||||||
|
github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
|
||||||
|
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/robertkrimen/otto v0.0.0-20211024170158-b87d35c0b86f h1:a7clxaGmmqtdNTXyvrp/lVO/Gnkzlhc/+dLs5v965GM=
|
||||||
|
github.com/robertkrimen/otto v0.0.0-20211024170158-b87d35c0b86f/go.mod h1:/mK7FZ3mFYEn9zvNPhpngTyatyehSwte5bJZ4ehL5Xw=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||||
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
|
||||||
|
github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
|
||||||
|
github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg=
|
||||||
|
github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||||
|
golang.org/x/image v0.0.0-20211028202545-6944b10bf410 h1:hTftEOvwiOq2+O8k2D5/Q7COC7k5Qcrgc2TFURJYnvQ=
|
||||||
|
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||||
|
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 h1:nhht2DYV/Sn3qOayu8lM+cU1ii9sTLUeBQwQQfUHtrs=
|
||||||
|
golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
|
gopkg.in/readline.v1 v1.0.0-20160726135117-62c6fe619375/go.mod h1:lNEQeAhU009zbRxng+XOj5ITVgY24WcbNnQopyfKoYQ=
|
||||||
|
gopkg.in/sourcemap.v1 v1.0.5 h1:inv58fC9f9J3TK2Y2R1NPntXEn3/wjWHkonhIUODNTI=
|
||||||
|
gopkg.in/sourcemap.v1 v1.0.5/go.mod h1:2RlvNNSMglmRrcvhfuzp4hQHwOtjxlbjX7UPY/GXb78=
|
||||||
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
@ -1,28 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
|
||||||
cpy "github.com/otiai10/copy"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ProcessAssets copies the assets from input to output dir
|
|
||||||
func ProcessAssets() {
|
|
||||||
switch config.Config.Assets.Action {
|
|
||||||
case "copy":
|
|
||||||
from := config.Config.Assets.FromPath
|
|
||||||
to := config.Config.Assets.ToPath
|
|
||||||
if !strings.HasPrefix(from, "/") {
|
|
||||||
from = config.Config.Directories.Input + "/" + from
|
|
||||||
}
|
|
||||||
if !strings.HasPrefix(to, "/") {
|
|
||||||
to = config.Config.Directories.Output + "/" + to
|
|
||||||
}
|
|
||||||
Log.Noticef("copying assets from '%s' to '%s'", from, to)
|
|
||||||
err := cpy.Copy(from, to)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not copy assets from '%s' to '%s': %s", from, to, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,514 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
|
||||||
"github.com/Depado/bfchroma"
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
"github.com/extemporalgenome/slug"
|
|
||||||
"github.com/flosch/pongo2"
|
|
||||||
cpy "github.com/otiai10/copy"
|
|
||||||
"gopkg.in/russross/blackfriday.v2"
|
|
||||||
"gopkg.in/yaml.v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func newContext() pongo2.Context {
|
|
||||||
return pongo2.Context{
|
|
||||||
"fnRequest": RequestFn,
|
|
||||||
"fnRender": RenderFn,
|
|
||||||
|
|
||||||
"AssetsPath": config.Config.Assets.ToPath,
|
|
||||||
|
|
||||||
"Timestamp": time.Now().Unix,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func fillNodeConfig(node *config.PathConfigTree, inBase, outBase, dir string, conf *config.PathConfig) {
|
|
||||||
inPath := inBase
|
|
||||||
if dir != "" {
|
|
||||||
inPath += "/" + dir
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Infof("reading input directory: %s", inPath)
|
|
||||||
|
|
||||||
node.InputPath = inPath
|
|
||||||
|
|
||||||
// read config
|
|
||||||
newConfig := new(config.PathConfig)
|
|
||||||
Log.Debug("looking for config.yml ...")
|
|
||||||
configFile := inPath + "/config.yml"
|
|
||||||
if _, err := os.Stat(configFile); os.IsNotExist(err) {
|
|
||||||
Log.Debug("no config.yml found in this directory, using upper configs")
|
|
||||||
config.Merge(newConfig, conf)
|
|
||||||
// remove this
|
|
||||||
newConfig.This = config.ThisPathConfig{}
|
|
||||||
} else {
|
|
||||||
Log.Debug("reading config...")
|
|
||||||
data, err := ioutil.ReadFile(configFile)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not read file '%s': %s", configFile, err)
|
|
||||||
}
|
|
||||||
err = yaml.Unmarshal(data, newConfig)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not parse YAML file '%s': %s", configFile, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Debug("merging config with upper config")
|
|
||||||
oldThis := newConfig.This
|
|
||||||
config.Merge(newConfig, conf)
|
|
||||||
newConfig.This = oldThis
|
|
||||||
|
|
||||||
Log.Debug(spew.Sdump(newConfig))
|
|
||||||
}
|
|
||||||
|
|
||||||
node.Config = newConfig
|
|
||||||
|
|
||||||
// calc outDir
|
|
||||||
stripedDir := dir
|
|
||||||
var regexStr *string
|
|
||||||
if newConfig.Path != nil {
|
|
||||||
regexStr = newConfig.Path.Strip
|
|
||||||
}
|
|
||||||
if regexStr != nil && *regexStr != "" {
|
|
||||||
if regex, err := regexp.Compile(*regexStr); err != nil {
|
|
||||||
Log.Panicf("error compiling path.strip regex '%s' from '%s': %s", *regexStr, inBase+"/"+dir, err)
|
|
||||||
} else {
|
|
||||||
stripedDir = regex.ReplaceAllString(stripedDir, "$1")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if node.Config.This.Navname == nil {
|
|
||||||
navname := strings.Replace(stripedDir, "_", " ", -1)
|
|
||||||
node.Config.This.Navname = &navname
|
|
||||||
}
|
|
||||||
|
|
||||||
stripedDir = slug.Slug(stripedDir)
|
|
||||||
outPath := outBase + "/" + stripedDir
|
|
||||||
outPath = path.Clean(outPath)
|
|
||||||
|
|
||||||
Log.Infof("calculated output directory: %s", outPath)
|
|
||||||
node.OutputPath = outPath
|
|
||||||
|
|
||||||
// handle collections
|
|
||||||
for _, colConfig := range newConfig.This.Collections {
|
|
||||||
if colConfig != nil {
|
|
||||||
if colConfig.Name == nil || *colConfig.Name == "" {
|
|
||||||
Log.Panicf("missing Name in collection config in '%s'", inPath)
|
|
||||||
}
|
|
||||||
if colConfig.URL == nil || *colConfig.URL == "" {
|
|
||||||
Log.Panicf("missing EntriesJSON in collection config in '%s'", inPath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if node.ColMap == nil {
|
|
||||||
node.ColMap = make(config.MapString)
|
|
||||||
}
|
|
||||||
ctx := newContext()
|
|
||||||
ctx["This"] = node.Config.This
|
|
||||||
ctx["Data"] = node.Config.Data
|
|
||||||
|
|
||||||
url, err := pongo2.RenderTemplateString(*colConfig.URL, ctx)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for Collection Element.URL in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
colData := jsonWebRequest(url)
|
|
||||||
node.ColMap[*colConfig.Name] = colData
|
|
||||||
|
|
||||||
if navT := colConfig.NavTemplate; navT != nil {
|
|
||||||
var entries []interface{}
|
|
||||||
var ok bool
|
|
||||||
if navT.EntriesAttribute != "" {
|
|
||||||
var colDataMap map[string]interface{}
|
|
||||||
if colDataMap, ok = colData.(map[string]interface{}); ok {
|
|
||||||
entries, ok = colDataMap[navT.EntriesAttribute].([]interface{})
|
|
||||||
if !ok {
|
|
||||||
Log.Debug(spew.Sdump(colDataMap))
|
|
||||||
Log.Panicf("invalid json data in [%s] from url '%s' for entries", navT.EntriesAttribute, url)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
entries, ok = colData.([]interface{})
|
|
||||||
}
|
|
||||||
if !ok {
|
|
||||||
Log.Debug(spew.Sdump(colData))
|
|
||||||
Log.Panicf("invalid json data from url '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", url)
|
|
||||||
}
|
|
||||||
|
|
||||||
// build navigation with detail sites
|
|
||||||
for idx, colEl := range entries {
|
|
||||||
ctxE := make(pongo2.Context)
|
|
||||||
err := config.Merge(&ctxE, ctx)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not merge context in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
var jsonCtx map[string]interface{}
|
|
||||||
if jsonCtx, ok = colEl.(map[string]interface{}); !ok {
|
|
||||||
Log.Debug(spew.Sdump(colEl))
|
|
||||||
Log.Panicf("no json object for entry index %d from url '%s'", idx, url)
|
|
||||||
}
|
|
||||||
err = config.Merge(&ctxE, pongo2.Context(jsonCtx))
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not merge context in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
tpl := ""
|
|
||||||
if navT.Template != "" {
|
|
||||||
tpl, err = pongo2.RenderTemplateString(navT.Template, ctxE)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for NavTemplate.Template in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if tpl == "" {
|
|
||||||
tpl = *newConfig.Template
|
|
||||||
}
|
|
||||||
|
|
||||||
dataKey := ""
|
|
||||||
if navT.DataKey != "" {
|
|
||||||
dataKey, err = pongo2.RenderTemplateString(navT.DataKey, ctxE)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for NavTemplate.DataKey in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
goTo, err := pongo2.RenderTemplateString(navT.GoTo, ctxE)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for NavTemplate.GoTo in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
goTo = strings.Trim(goTo, "/")
|
|
||||||
goTo = path.Clean(goTo)
|
|
||||||
|
|
||||||
if strings.Contains(goTo, "..") {
|
|
||||||
Log.Panicf("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", inPath, goTo)
|
|
||||||
}
|
|
||||||
if goTo == "." {
|
|
||||||
Log.Panicf("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", inPath)
|
|
||||||
}
|
|
||||||
if goTo == "" {
|
|
||||||
Log.Panicf("missing NavTemplate.GoTo in collection config in '%s'", inPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
navname := ""
|
|
||||||
if navT.Navname != "" {
|
|
||||||
navname, err = pongo2.RenderTemplateString(navT.Navname, ctxE)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for NavTemplate.Navname in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
body := ""
|
|
||||||
if navT.Body != "" {
|
|
||||||
body, err = pongo2.RenderTemplateString(navT.Body, ctxE)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("invalid template string for NavTemplate.Body in '%s': %s", inPath, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
add2Nav(node, node.Config, tpl, goTo, navname, colEl, dataKey, body, navT.Hidden)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ReadContentDir walks through content directory and builds the tree of configurations
|
|
||||||
func ReadContentDir(inBase string, outBase string, dir string, conf *config.PathConfig, tree *config.PathConfigTree) {
|
|
||||||
fillNodeConfig(tree, inBase, outBase, dir, conf)
|
|
||||||
|
|
||||||
files, err := ioutil.ReadDir(tree.InputPath)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// first only files
|
|
||||||
for _, f := range files {
|
|
||||||
p := tree.InputPath + "/" + f.Name()
|
|
||||||
if !f.IsDir() && f.Name() != "config.yml" {
|
|
||||||
switch path.Ext(f.Name()) {
|
|
||||||
case ".md":
|
|
||||||
Log.Debugf(".MD %s", p)
|
|
||||||
if tree.InputFiles == nil {
|
|
||||||
tree.InputFiles = make([]string, 0)
|
|
||||||
}
|
|
||||||
tree.InputFiles = append(tree.InputFiles, f.Name())
|
|
||||||
break
|
|
||||||
default:
|
|
||||||
Log.Debugf("FIL %s", p)
|
|
||||||
if tree.OtherFiles == nil {
|
|
||||||
tree.OtherFiles = make([]string, 0)
|
|
||||||
}
|
|
||||||
tree.OtherFiles = append(tree.OtherFiles, f.Name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// only directorys, needed config before
|
|
||||||
for _, f := range files {
|
|
||||||
p := tree.InputPath + "/" + f.Name()
|
|
||||||
if f.IsDir() {
|
|
||||||
Log.Debugf("DIR %s", p)
|
|
||||||
newTree := new(config.PathConfigTree)
|
|
||||||
if tree.Sub == nil {
|
|
||||||
tree.Sub = make([]*config.PathConfigTree, 0)
|
|
||||||
}
|
|
||||||
tree.Sub = append(tree.Sub, newTree)
|
|
||||||
ReadContentDir(tree.InputPath, tree.OutputPath, f.Name(), tree.Config, newTree)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ProcessContent walks recursivly through the input paths and processes all files for output
|
|
||||||
func ProcessContent(rootConf, conf *config.PathConfigTree) {
|
|
||||||
CreateDirectory(conf.OutputPath)
|
|
||||||
|
|
||||||
curNavPath := strings.TrimPrefix(conf.OutputPath, config.Config.Directories.Output)
|
|
||||||
curNavPath = strings.TrimPrefix(curNavPath, "/")
|
|
||||||
curNavPath = path.Clean(curNavPath)
|
|
||||||
if curNavPath == "." {
|
|
||||||
curNavPath = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
goTo := conf.Config.This.GoTo
|
|
||||||
if goTo != nil && *goTo != "" {
|
|
||||||
goToFixed := *goTo
|
|
||||||
if strings.HasPrefix(goToFixed, "/") {
|
|
||||||
goToFixed = BackToRoot(curNavPath) + goToFixed
|
|
||||||
}
|
|
||||||
goToFixed = path.Clean(goToFixed)
|
|
||||||
|
|
||||||
switch config.Config.Webserver.Type {
|
|
||||||
case "apache":
|
|
||||||
htaccessFile := conf.OutputPath + "/.htaccess"
|
|
||||||
Log.Noticef("writing '%s' with redirect to: %s", htaccessFile, goToFixed)
|
|
||||||
err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
|
|
||||||
RewriteRule ^$ %{REQUEST_URI}`+goToFixed+`/ [R,L]
|
|
||||||
`), 0644)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not write '%s': %s", htaccessFile, err)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, file := range conf.InputFiles {
|
|
||||||
var input []byte
|
|
||||||
inFile := "InputString"
|
|
||||||
|
|
||||||
if file != "" {
|
|
||||||
inFile = conf.InputPath + "/" + file
|
|
||||||
Log.Debugf("reading file: %s", inFile)
|
|
||||||
|
|
||||||
var err error
|
|
||||||
input, err = ioutil.ReadFile(inFile)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not read '%s':%s", inFile, err)
|
|
||||||
}
|
|
||||||
Log.Infof("processing input file '%s'", inFile)
|
|
||||||
} else {
|
|
||||||
// use input string if available and input filename == ""
|
|
||||||
var inputString *string
|
|
||||||
if i := conf.Config.Index; i != nil {
|
|
||||||
inputString = i.InputString
|
|
||||||
}
|
|
||||||
if inputString != nil {
|
|
||||||
Log.Debugf("using input string instead of file")
|
|
||||||
input = []byte(*inputString)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
newConfig := new(config.PathConfig)
|
|
||||||
|
|
||||||
regex := regexp.MustCompile("(?s)^---(.*?)\\r?\\n\\r?---\\r?\\n\\r?")
|
|
||||||
yamlData := regex.Find(input)
|
|
||||||
if string(yamlData) != "" {
|
|
||||||
// replace tabs
|
|
||||||
yamlData = bytes.Replace(yamlData, []byte("\t"), []byte(" "), -1)
|
|
||||||
|
|
||||||
Log.Debugf("found yaml header in '%s', merging config", inFile)
|
|
||||||
err := yaml.Unmarshal(yamlData, newConfig)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not parse YAML header from '%s': %s", inFile, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Debug("merging config with upper config")
|
|
||||||
oldThis := newConfig.This
|
|
||||||
config.Merge(newConfig, conf.Config)
|
|
||||||
newConfig.This = oldThis
|
|
||||||
|
|
||||||
Log.Debug(spew.Sdump(newConfig))
|
|
||||||
|
|
||||||
input = regex.ReplaceAll(input, []byte(""))
|
|
||||||
} else {
|
|
||||||
config.Merge(newConfig, conf.Config)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ignore ???
|
|
||||||
ignoreFile := false
|
|
||||||
var ignoreRegex *string
|
|
||||||
var stripRegex *string
|
|
||||||
var outputExt *string
|
|
||||||
if f := newConfig.Filename; f != nil {
|
|
||||||
ignoreRegex = f.Ignore
|
|
||||||
stripRegex = f.Strip
|
|
||||||
outputExt = f.OutputExtension
|
|
||||||
}
|
|
||||||
if ignoreRegex != nil && *ignoreRegex != "" {
|
|
||||||
regex, err := regexp.Compile(*ignoreRegex)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not compile filename.ignore regexp '%s' for file '%s': %s", *ignoreRegex, inFile, err)
|
|
||||||
}
|
|
||||||
ignoreFile = regex.MatchString(file)
|
|
||||||
}
|
|
||||||
|
|
||||||
if ignoreFile {
|
|
||||||
Log.Infof("ignoring file '%s', because of filename.ignore", inFile)
|
|
||||||
} else {
|
|
||||||
|
|
||||||
// build output filename
|
|
||||||
outputFilename := file
|
|
||||||
|
|
||||||
var indexInputFile *string
|
|
||||||
var indexOutputFile *string
|
|
||||||
if i := newConfig.Index; i != nil {
|
|
||||||
indexInputFile = i.InputFile
|
|
||||||
indexOutputFile = i.OutputFile
|
|
||||||
}
|
|
||||||
|
|
||||||
if indexInputFile != nil &&
|
|
||||||
*indexInputFile == file &&
|
|
||||||
indexOutputFile != nil &&
|
|
||||||
*indexOutputFile != "" {
|
|
||||||
outputFilename = *indexOutputFile
|
|
||||||
} else {
|
|
||||||
if stripRegex != nil && *stripRegex != "" {
|
|
||||||
regex, err := regexp.Compile(*stripRegex)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not compile filename.strip regexp '%s' for file '%s': %s", *stripRegex, inFile, err)
|
|
||||||
}
|
|
||||||
outputFilename = regex.ReplaceAllString(outputFilename, "$1")
|
|
||||||
}
|
|
||||||
if outputExt != nil && *outputExt != "" {
|
|
||||||
outputFilename += "." + *outputExt
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
outFile := conf.OutputPath + "/" + outputFilename
|
|
||||||
Log.Debugf("using '%s' as output file", outFile)
|
|
||||||
|
|
||||||
// use --- for splitting document in markdown parts
|
|
||||||
regex := regexp.MustCompile("\\r?\\n\\r?---\\r?\\n\\r?")
|
|
||||||
inputParts := regex.Split(string(input), -1)
|
|
||||||
htmlParts := make([]*pongo2.Value, 0)
|
|
||||||
for _, iPart := range inputParts {
|
|
||||||
htmlParts = append(htmlParts,
|
|
||||||
pongo2.AsSafeValue(
|
|
||||||
string(renderMarkdown([]byte(iPart), newConfig.Markdown))))
|
|
||||||
}
|
|
||||||
|
|
||||||
// build navigation
|
|
||||||
navMap := make(map[string]*NavElement)
|
|
||||||
navSlice := make([]*NavElement, 0)
|
|
||||||
navActive := make([]*NavElement, 0)
|
|
||||||
BuildNavigation(rootConf, &navMap, &navSlice, &navActive, curNavPath)
|
|
||||||
|
|
||||||
// read yaml header as data for template
|
|
||||||
ctx := newContext()
|
|
||||||
ctx["This"] = newConfig.This
|
|
||||||
ctx["Meta"] = newConfig.Meta
|
|
||||||
ctx["Data"] = newConfig.Data
|
|
||||||
ctx["ColMap"] = rootConf.ColMap // root as NavMap and NavSlice, for sub go to NavElement.ColMap
|
|
||||||
ctx["NavMap"] = navMap
|
|
||||||
ctx["NavSlice"] = navSlice
|
|
||||||
ctx["NavActive"] = navActive
|
|
||||||
ctx["Body"] = pongo2.AsSafeValue(string(renderMarkdown(input, newConfig.Markdown)))
|
|
||||||
ctx["BodyParts"] = htmlParts
|
|
||||||
ctx["CurrentPath"] = curNavPath
|
|
||||||
// set active nav element
|
|
||||||
if len(navActive) > 0 {
|
|
||||||
ctx["NavElement"] = navActive[len(navActive)-1]
|
|
||||||
} else {
|
|
||||||
// if no active path to content, we are in root dir
|
|
||||||
ctx["NavElement"] = &NavElement{
|
|
||||||
GoTo: BackToRoot(curNavPath),
|
|
||||||
Active: true,
|
|
||||||
ColMap: rootConf.ColMap,
|
|
||||||
Data: rootConf.Config.Data,
|
|
||||||
This: rootConf.Config.This,
|
|
||||||
SubMap: &navMap,
|
|
||||||
SubSlice: &navSlice,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Debugf("rendering template '%s' for '%s'", *newConfig.Template, outFile)
|
|
||||||
templateFilename := *newConfig.Template
|
|
||||||
result, err := RenderTemplate(*newConfig.Template, conf, newConfig, &ctx)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not execute template '%s' for input file '%s': %s", templateFilename, inFile, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
result = FixAssetsPath(result, curNavPath)
|
|
||||||
|
|
||||||
Log.Noticef("writing to output file: %s", outFile)
|
|
||||||
err = ioutil.WriteFile(outFile, []byte(result), 0644)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not write to output file '%s': %s", outFile, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
//fmt.Println(string(html))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// process other files, copy...
|
|
||||||
for _, file := range conf.OtherFiles {
|
|
||||||
switch config.Config.OtherFiles.Action {
|
|
||||||
case "copy":
|
|
||||||
from := conf.InputPath + "/" + file
|
|
||||||
to := conf.OutputPath + "/" + file
|
|
||||||
Log.Noticef("copying file from '%s' to '%s'", from, to)
|
|
||||||
err := cpy.Copy(from, to)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not copy file from '%s' to '%s': %s", from, to, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
i := 0
|
|
||||||
for i < len(conf.Sub) {
|
|
||||||
ProcessContent(rootConf, conf.Sub[i])
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func renderMarkdown(input []byte, markdownConf *config.MarkdownConfig) []byte {
|
|
||||||
var options []blackfriday.Option
|
|
||||||
|
|
||||||
var chromaRenderer *bool
|
|
||||||
var chromaStyle *string
|
|
||||||
if m := markdownConf; m != nil {
|
|
||||||
chromaRenderer = m.ChromaRenderer
|
|
||||||
chromaStyle = m.ChromaStyle
|
|
||||||
}
|
|
||||||
if chromaStyle == nil {
|
|
||||||
style := "monokai"
|
|
||||||
chromaStyle = &style
|
|
||||||
}
|
|
||||||
if chromaRenderer != nil && *chromaRenderer {
|
|
||||||
options = []blackfriday.Option{
|
|
||||||
blackfriday.WithRenderer(
|
|
||||||
bfchroma.NewRenderer(
|
|
||||||
bfchroma.Style(*chromaStyle),
|
|
||||||
),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// fix \r from markdown for blackfriday
|
|
||||||
input = bytes.Replace(input, []byte("\r"), []byte(""), -1)
|
|
||||||
return blackfriday.Run(input, options...)
|
|
||||||
}
|
|
@ -1,24 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import "os"
|
|
||||||
|
|
||||||
// CreateDirectory creates direcory with all missing parents and panic if error
|
|
||||||
func CreateDirectory(dir string) {
|
|
||||||
Log.Debugf("trying to create output directory: %s", dir)
|
|
||||||
|
|
||||||
if dirH, err := os.Stat(dir); os.IsNotExist(err) {
|
|
||||||
err := os.MkdirAll(dir, 0755)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not create output directory '%s': %s", dir, err)
|
|
||||||
}
|
|
||||||
Log.Noticef("created output directory: %s", dir)
|
|
||||||
} else if dirH != nil {
|
|
||||||
if dirH.IsDir() {
|
|
||||||
Log.Noticef("output directory '%s' already exists", dir)
|
|
||||||
} else {
|
|
||||||
Log.Panicf("output directory '%s' is no directory", dir)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Log.Panicf("unknown error for output directory '%s': %s", dir, err)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,52 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
"github.com/op/go-logging"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Log is global logger
|
|
||||||
var Log = logging.MustGetLogger("myLogger")
|
|
||||||
|
|
||||||
// ConfigureLogger sets logger backend and level
|
|
||||||
func ConfigureLogger(level string) {
|
|
||||||
logBackend := logging.NewLogBackend(os.Stderr, "", 0)
|
|
||||||
logBackendFormatter := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(
|
|
||||||
`%{color}%{time:15:04:05.000} %{shortfunc} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,
|
|
||||||
))
|
|
||||||
logBackendLeveled := logging.AddModuleLevel(logBackendFormatter)
|
|
||||||
logBackendLevel := logging.INFO
|
|
||||||
switch level {
|
|
||||||
case "debug":
|
|
||||||
logBackendLevel = logging.DEBUG
|
|
||||||
break
|
|
||||||
|
|
||||||
case "info":
|
|
||||||
logBackendLevel = logging.INFO
|
|
||||||
break
|
|
||||||
|
|
||||||
case "notice":
|
|
||||||
logBackendLevel = logging.NOTICE
|
|
||||||
break
|
|
||||||
|
|
||||||
case "warning":
|
|
||||||
logBackendLevel = logging.WARNING
|
|
||||||
break
|
|
||||||
|
|
||||||
case "error":
|
|
||||||
logBackendLevel = logging.ERROR
|
|
||||||
break
|
|
||||||
|
|
||||||
}
|
|
||||||
logBackendLeveled.SetLevel(logBackendLevel, "")
|
|
||||||
logging.SetBackend(logBackendLeveled)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
spew.Config.DisablePointerAddresses = true
|
|
||||||
spew.Config.DisableCapacities = true
|
|
||||||
spew.Config.DisableMethods = true
|
|
||||||
spew.Config.DisablePointerMethods = true
|
|
||||||
}
|
|
103
helper/render.go
103
helper/render.go
@ -1,103 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
|
||||||
"github.com/flosch/pongo2"
|
|
||||||
)
|
|
||||||
|
|
||||||
var templateCache = make(map[string]*pongo2.Template)
|
|
||||||
var currentContext *pongo2.Context
|
|
||||||
var currentTreeNodeConfig *config.PathConfigTree
|
|
||||||
var currentPathConfig *config.PathConfig
|
|
||||||
var templateDir string
|
|
||||||
|
|
||||||
// BackToRoot builds ../../ string
|
|
||||||
func BackToRoot(curNavPath string) string {
|
|
||||||
tmpPath := ""
|
|
||||||
if curNavPath != "" {
|
|
||||||
for i := strings.Count(curNavPath, "/") + 1; i > 0; i-- {
|
|
||||||
tmpPath += "../"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return tmpPath
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResolveNavPath fixes nav target relative to current navigation path
|
|
||||||
func ResolveNavPath(target string) string {
|
|
||||||
curNavPath := (*currentContext)["CurrentPath"].(string)
|
|
||||||
if strings.HasPrefix(target, "/") {
|
|
||||||
target = BackToRoot(curNavPath) + target
|
|
||||||
}
|
|
||||||
target = path.Clean(target)
|
|
||||||
return target
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResolveOutputPath fixes output directory relative to current navigation path
|
|
||||||
func ResolveOutputPath(target string) string {
|
|
||||||
if strings.HasPrefix(target, "/") {
|
|
||||||
target = config.Config.Directories.Output + "/" + target
|
|
||||||
} else {
|
|
||||||
target = currentTreeNodeConfig.OutputPath + "/" + target
|
|
||||||
}
|
|
||||||
return path.Clean(target)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResolveInputPath fixes input directory relative to current navigation path
|
|
||||||
func ResolveInputPath(target string) string {
|
|
||||||
if strings.HasPrefix(target, "/") {
|
|
||||||
target = config.Config.Directories.Input + "/" + target
|
|
||||||
} else {
|
|
||||||
target = currentTreeNodeConfig.InputPath + "/" + target
|
|
||||||
}
|
|
||||||
return path.Clean(target)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetTemplateDir sets base directory for searching template files
|
|
||||||
func SetTemplateDir(dir string) {
|
|
||||||
templateDir = dir
|
|
||||||
}
|
|
||||||
|
|
||||||
// RenderTemplate renders a pongo2 template with context
|
|
||||||
func RenderTemplate(filename string, treeNodeConfig *config.PathConfigTree, pathConfig *config.PathConfig, ctx *pongo2.Context) (string, error) {
|
|
||||||
currentContext = ctx
|
|
||||||
currentTreeNodeConfig = treeNodeConfig
|
|
||||||
currentPathConfig = pathConfig
|
|
||||||
templateFile := templateDir + "/" + filename
|
|
||||||
template := templateCache[templateFile]
|
|
||||||
if template == nil {
|
|
||||||
var err error
|
|
||||||
if template, err = pongo2.FromFile(templateFile); err != nil {
|
|
||||||
log.Panicf("could not parse template '%s': %s", templateFile, err)
|
|
||||||
} else {
|
|
||||||
templateCache[templateFile] = template
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return template.Execute(*ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
// FixAssetsPath replaces assets path based on current path
|
|
||||||
func FixAssetsPath(str, curNavPath string) string {
|
|
||||||
if find := config.Config.Assets.FixTemplate.Find; find != "" {
|
|
||||||
Log.Debugf("fixing assets paths for path '%s'", curNavPath)
|
|
||||||
repl := config.Config.Assets.FixTemplate.Replace
|
|
||||||
toPath := config.Config.Assets.ToPath
|
|
||||||
|
|
||||||
bToRoot := BackToRoot(curNavPath)
|
|
||||||
regex, err := regexp.Compile(find)
|
|
||||||
if err != nil {
|
|
||||||
log.Panicf("could not compile regexp '%s' for assets path: %s", find, err)
|
|
||||||
}
|
|
||||||
repl = bToRoot + toPath + "/" + repl
|
|
||||||
repl = path.Clean(repl) + "/"
|
|
||||||
Log.Debugf("new assets paths: %s", repl)
|
|
||||||
return regex.ReplaceAllString(str, repl)
|
|
||||||
}
|
|
||||||
|
|
||||||
return str
|
|
||||||
}
|
|
@ -1,377 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/md5"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"image"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
"github.com/ddliu/motto"
|
|
||||||
"github.com/disintegration/imaging"
|
|
||||||
"github.com/flosch/pongo2"
|
|
||||||
_ "github.com/flosch/pongo2-addons"
|
|
||||||
_ "github.com/robertkrimen/otto/underscore"
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
err := pongo2.ReplaceFilter("markdown", MarkdownFilter)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
newFilters := map[string]pongo2.FilterFunction{
|
|
||||||
"image_process": ImageProcessFilter,
|
|
||||||
"relative_path": RelativePathFilter,
|
|
||||||
"json": JSONFilter,
|
|
||||||
"dump": DumpFilter,
|
|
||||||
}
|
|
||||||
for name, fn := range newFilters {
|
|
||||||
err := pongo2.RegisterFilter(name, fn)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DumpFilter is a pongo2 filter, which returns a spew.Dump of the input
|
|
||||||
func DumpFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
|
||||||
dumpString := spew.Sdump(in.Interface())
|
|
||||||
return pongo2.AsValue(string(dumpString)), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSONFilter is a pongo2 filter, which returns a json string of the input
|
|
||||||
func JSONFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
|
||||||
pretty := false
|
|
||||||
for _, s := range strings.Split(param.String(), ",") {
|
|
||||||
switch s {
|
|
||||||
case "pretty":
|
|
||||||
pretty = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var err error
|
|
||||||
var jsonBytes []byte
|
|
||||||
if pretty {
|
|
||||||
jsonBytes, err = json.MarshalIndent(in.Interface(), "", " ")
|
|
||||||
|
|
||||||
} else {
|
|
||||||
jsonBytes, err = json.Marshal(in.Interface())
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:json",
|
|
||||||
OrigError: err,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return pongo2.AsSafeValue(string(jsonBytes)), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// MarkdownFilter is a pongo2 filter, which converts markdown to html
|
|
||||||
func MarkdownFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
|
||||||
return pongo2.AsSafeValue(
|
|
||||||
string(
|
|
||||||
renderMarkdown(
|
|
||||||
[]byte(in.String()),
|
|
||||||
currentPathConfig.Markdown,
|
|
||||||
))),
|
|
||||||
nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseImageParams(str string) (*config.ImagingConfig, error) {
|
|
||||||
p := config.ImagingConfig{}
|
|
||||||
if str == "" {
|
|
||||||
config.Merge(&p, currentPathConfig.Imaging)
|
|
||||||
// Filename and Format are only valid for current image
|
|
||||||
p.Filename = ""
|
|
||||||
p.Format = ""
|
|
||||||
return &p, nil
|
|
||||||
}
|
|
||||||
for _, s := range strings.Split(str, ",") {
|
|
||||||
e := strings.Split(s, "=")
|
|
||||||
if len(e) < 2 {
|
|
||||||
return nil, fmt.Errorf("invalid image parameter: %s", s)
|
|
||||||
}
|
|
||||||
var err error
|
|
||||||
switch e[0] {
|
|
||||||
case "w":
|
|
||||||
p.Width, err = strconv.Atoi(e[1])
|
|
||||||
case "h":
|
|
||||||
p.Height, err = strconv.Atoi(e[1])
|
|
||||||
case "f":
|
|
||||||
p.Filename = e[1]
|
|
||||||
case "t":
|
|
||||||
p.TargetDir = e[1]
|
|
||||||
case "p":
|
|
||||||
p.Process = e[1]
|
|
||||||
case "a":
|
|
||||||
p.Anchor = e[1]
|
|
||||||
case "q":
|
|
||||||
p.Quality, err = strconv.Atoi(e[1])
|
|
||||||
if p.Quality < 0 || p.Quality > 100 {
|
|
||||||
err = errors.New("q= must be between 1 and 100")
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("invalid image parameter: %s", s)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("could not convert image parameter to correct value type for '%s': %s", s, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return &p, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func getImageFromURL(url string) (image.Image, string, error) {
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, "", fmt.Errorf("could not get url '%s': %s", url, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
img, format, err := image.Decode(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, "", fmt.Errorf("could read body from url '%s': %s", url, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return img, format, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ImageProcessFilter read the image url and process parameters and saves the resized/processed image
|
|
||||||
// param: w=WITDH,h=HEIGHT
|
|
||||||
func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
|
||||||
imgSource := in.String()
|
|
||||||
p, err := parseImageParams(param.String())
|
|
||||||
if err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: err,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if p == nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: errors.New("no imaging config defined"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var img image.Image
|
|
||||||
if p.Process == "" {
|
|
||||||
p.Process = "resize"
|
|
||||||
}
|
|
||||||
filePrefix := fmt.Sprintf(
|
|
||||||
"%s_%dx%d_q%03d",
|
|
||||||
p.Process,
|
|
||||||
p.Width,
|
|
||||||
p.Height,
|
|
||||||
p.Quality,
|
|
||||||
)
|
|
||||||
if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
|
|
||||||
// remote file
|
|
||||||
img, p.Format, err = getImageFromURL(imgSource)
|
|
||||||
if err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("could not open image '%s': %s", imgSource, err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// build filename
|
|
||||||
if p.Filename == "" {
|
|
||||||
var fBase string
|
|
||||||
if u, _ := url.Parse(imgSource); u != nil {
|
|
||||||
fBase = strings.Split(path.Base(u.Path), ".")[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
p.Filename = fmt.Sprintf(
|
|
||||||
"%s_%x_%s.%s",
|
|
||||||
filePrefix,
|
|
||||||
md5.Sum([]byte(imgSource)),
|
|
||||||
fBase,
|
|
||||||
p.Format,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// local file
|
|
||||||
imgSource = ResolveInputPath(imgSource)
|
|
||||||
if p.Filename == "" {
|
|
||||||
p.Filename = fmt.Sprintf(
|
|
||||||
"%s_%s",
|
|
||||||
filePrefix,
|
|
||||||
path.Base(imgSource),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var imgTarget string
|
|
||||||
if p.TargetDir != "" {
|
|
||||||
imgTarget = ResolveOutputPath(
|
|
||||||
path.Clean(p.TargetDir) + "/" +
|
|
||||||
p.Filename,
|
|
||||||
)
|
|
||||||
|
|
||||||
pt := path.Dir(imgTarget)
|
|
||||||
if _, err := os.Stat(pt); os.IsNotExist(err) {
|
|
||||||
Log.Infof("create image target dir: %s", pt)
|
|
||||||
if err := os.MkdirAll(pt, 0755); err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("could not create image target dir '%s': %s", pt, err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
p.Filename = ResolveNavPath(p.TargetDir + "/" + p.Filename)
|
|
||||||
|
|
||||||
} else {
|
|
||||||
imgTarget = ResolveOutputPath(p.Filename)
|
|
||||||
}
|
|
||||||
|
|
||||||
if f, err := os.Stat(imgTarget); err == nil && !f.IsDir() {
|
|
||||||
Log.Noticef("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
|
|
||||||
} else {
|
|
||||||
Log.Noticef("processing image from %s to %s", imgSource, imgTarget)
|
|
||||||
if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
|
|
||||||
// webrequest before finding target filename, because of file format in filename
|
|
||||||
} else {
|
|
||||||
img, err = imaging.Open(imgSource, imaging.AutoOrientation(true))
|
|
||||||
if err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("could not open image '%s': %s", imgSource, err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
switch p.Process {
|
|
||||||
case "resize":
|
|
||||||
img = imaging.Resize(img, p.Width, p.Height, imaging.Lanczos)
|
|
||||||
case "fit":
|
|
||||||
img = imaging.Fit(img, p.Width, p.Height, imaging.Lanczos)
|
|
||||||
case "fill":
|
|
||||||
var anchor imaging.Anchor
|
|
||||||
switch strings.ToLower(p.Anchor) {
|
|
||||||
case "":
|
|
||||||
fallthrough
|
|
||||||
case "center":
|
|
||||||
anchor = imaging.Center
|
|
||||||
case "topleft":
|
|
||||||
anchor = imaging.TopLeft
|
|
||||||
case "top":
|
|
||||||
anchor = imaging.Top
|
|
||||||
case "topright":
|
|
||||||
anchor = imaging.TopRight
|
|
||||||
case "left":
|
|
||||||
anchor = imaging.Left
|
|
||||||
case "right":
|
|
||||||
anchor = imaging.Right
|
|
||||||
case "bottomleft":
|
|
||||||
anchor = imaging.BottomLeft
|
|
||||||
case "bottom":
|
|
||||||
anchor = imaging.Bottom
|
|
||||||
case "bottomright":
|
|
||||||
anchor = imaging.BottomRight
|
|
||||||
default:
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("unknown anchor a=%s definition", p.Anchor),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
img = imaging.Fill(img, p.Width, p.Height, anchor, imaging.Lanczos)
|
|
||||||
default:
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("invalid p parameter '%s'", p.Process),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var encodeOptions = make([]imaging.EncodeOption, 0)
|
|
||||||
if p.Quality > 0 {
|
|
||||||
encodeOptions = append(encodeOptions, imaging.JPEGQuality(p.Quality))
|
|
||||||
}
|
|
||||||
|
|
||||||
err = imaging.Save(img, imgTarget, encodeOptions...)
|
|
||||||
if err != nil {
|
|
||||||
return nil, &pongo2.Error{
|
|
||||||
Sender: "filter:image_resize",
|
|
||||||
OrigError: fmt.Errorf("could save image '%s': %s", imgTarget, err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return pongo2.AsValue(ResolveNavPath(p.Filename)), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RelativePathFilter returns the relative path to navpoint based on current nav
|
|
||||||
func RelativePathFilter(in, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
|
||||||
return pongo2.AsValue(
|
|
||||||
ResolveNavPath(
|
|
||||||
in.String(),
|
|
||||||
),
|
|
||||||
), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RegisterFilters reads a directory and register filters from files within it
|
|
||||||
func RegisterFilters(dir string) {
|
|
||||||
files, err := ioutil.ReadDir(dir)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not read from template filters dir '%s': %s", dir, err)
|
|
||||||
}
|
|
||||||
for _, f := range files {
|
|
||||||
if !f.IsDir() {
|
|
||||||
switch path.Ext(f.Name()) {
|
|
||||||
case ".js":
|
|
||||||
fileBase := strings.TrimSuffix(f.Name(), ".js")
|
|
||||||
jsFile := dir + "/" + f.Name()
|
|
||||||
Log.Debugf("trying to register filter from: %s", jsFile)
|
|
||||||
/*
|
|
||||||
jsStr, err := ioutil.ReadFile(jsFile)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not read '%s': %s", jsFile, err)
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
vm := motto.New()
|
|
||||||
fn, err := vm.Run(jsFile)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("error in javascript vm for '%s': %s", jsFile, err)
|
|
||||||
}
|
|
||||||
if !fn.IsFunction() {
|
|
||||||
Log.Panicf("%s does not contain a function code", jsFile)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = pongo2.RegisterFilter(
|
|
||||||
fileBase,
|
|
||||||
func(in, param *pongo2.Value) (out *pongo2.Value, erro *pongo2.Error) {
|
|
||||||
thisObj, _ := vm.Object("({})")
|
|
||||||
if currentContext != nil {
|
|
||||||
thisObj.Set("context", *currentContext)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not set context as in '%s': %s", jsFile, err)
|
|
||||||
}
|
|
||||||
ret, err := fn.Call(thisObj.Value(), in.Interface(), param.Interface())
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("error in javascript file '%s' while calling returned function: %s", jsFile, err)
|
|
||||||
}
|
|
||||||
retGo, err := ret.Export()
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("export error for '%s': %s", jsFile, err)
|
|
||||||
}
|
|
||||||
return pongo2.AsValue(retGo), nil
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not register filter from '%s': %s", jsFile, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@ -1,125 +0,0 @@
|
|||||||
package helper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
|
||||||
"github.com/flosch/pongo2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func jsonWebRequest(url string) interface{} {
|
|
||||||
Log.Noticef("requesting url via GET %s", url)
|
|
||||||
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not get url '%s': %s", url, err)
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
body, err := ioutil.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
Log.Panicf("could not read body from url '%s': %s", url, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Debugf("output from url '%s':\n%s", url, string(body))
|
|
||||||
|
|
||||||
if resp.StatusCode >= 400 {
|
|
||||||
Log.Panicf("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
|
|
||||||
}
|
|
||||||
|
|
||||||
contentType := resp.Header.Get("Content-Type")
|
|
||||||
|
|
||||||
if strings.Contains(contentType, "json") {
|
|
||||||
|
|
||||||
} else {
|
|
||||||
Log.Panicf("is not json '%s' from url '%s'", contentType, url)
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonMap := make(map[string]interface{})
|
|
||||||
err = json.Unmarshal(body, &jsonMap)
|
|
||||||
if err == nil {
|
|
||||||
return jsonMap
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonArrayMap := make([]map[string]interface{}, 0)
|
|
||||||
err = json.Unmarshal(body, &jsonArrayMap)
|
|
||||||
if err == nil {
|
|
||||||
return jsonArrayMap
|
|
||||||
}
|
|
||||||
|
|
||||||
Log.Panicf("could not read json from '%s': invalid type", url)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequestFn will make a web request and returns map[string]interface form pongo2
|
|
||||||
func RequestFn(url *pongo2.Value, args ...*pongo2.Value) *pongo2.Value {
|
|
||||||
u := url.String()
|
|
||||||
return pongo2.AsValue(jsonWebRequest(u))
|
|
||||||
}
|
|
||||||
|
|
||||||
func add2Nav(currentNode *config.PathConfigTree, pathConfig *config.PathConfig, tplFilename, outDir string, navname string, ctx interface{}, dataMapKey string, body string, hidden bool) {
|
|
||||||
newNodeConfig := new(config.PathConfigTree)
|
|
||||||
fillNodeConfig(
|
|
||||||
newNodeConfig,
|
|
||||||
currentNode.InputPath,
|
|
||||||
currentNode.OutputPath,
|
|
||||||
outDir,
|
|
||||||
pathConfig,
|
|
||||||
)
|
|
||||||
if navname != "" {
|
|
||||||
newNodeConfig.Config.This = config.ThisPathConfig{
|
|
||||||
Navname: &navname,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if dataMapKey != "" {
|
|
||||||
if newNodeConfig.Config.Data == nil {
|
|
||||||
newNodeConfig.Config.Data = make(config.MapString)
|
|
||||||
}
|
|
||||||
// as submap in Data
|
|
||||||
newNodeConfig.Config.Data[dataMapKey] = ctx
|
|
||||||
} else if m, ok := ctx.(map[string]interface{}); ok {
|
|
||||||
// direct set data
|
|
||||||
newNodeConfig.Config.Data = m
|
|
||||||
}
|
|
||||||
|
|
||||||
// fake via normal file behavior
|
|
||||||
newNodeConfig.Config.Template = &tplFilename
|
|
||||||
newNodeConfig.InputFiles = []string{""} // empty file is special for use InputString
|
|
||||||
indexInFile := ""
|
|
||||||
indexOutFile := "index.html"
|
|
||||||
if idx := newNodeConfig.Config.Index; idx != nil {
|
|
||||||
if idx.OutputFile != nil && *idx.OutputFile != "" {
|
|
||||||
indexOutFile = *idx.OutputFile
|
|
||||||
}
|
|
||||||
}
|
|
||||||
newNodeConfig.Config.Index = &config.IndexConfig{
|
|
||||||
InputFile: &indexInFile,
|
|
||||||
OutputFile: &indexOutFile,
|
|
||||||
InputString: &body,
|
|
||||||
}
|
|
||||||
newNodeConfig.Hidden = hidden
|
|
||||||
|
|
||||||
currentNode.Sub = append(currentNode.Sub, newNodeConfig)
|
|
||||||
}
|
|
||||||
|
|
||||||
// RenderFn renders a pongo2 template with additional context
|
|
||||||
func RenderFn(templateFilename, outDir, ctx *pongo2.Value, param ...*pongo2.Value) *pongo2.Value {
|
|
||||||
dataMapKey := ""
|
|
||||||
body := ""
|
|
||||||
|
|
||||||
for i, p := range param {
|
|
||||||
switch i {
|
|
||||||
case 0:
|
|
||||||
dataMapKey = p.String()
|
|
||||||
case 1:
|
|
||||||
body = p.String()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
add2Nav(currentTreeNodeConfig, currentPathConfig, templateFilename.String(), outDir.String(), "", ctx.Interface(), dataMapKey, body, true)
|
|
||||||
|
|
||||||
return pongo2.AsValue(nil)
|
|
||||||
}
|
|
63
pkg/filter/custom.go
Normal file
63
pkg/filter/custom.go
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
"github.com/ddliu/motto"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
_ "github.com/robertkrimen/otto/underscore"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RegisterFilters reads a directory and register filters from files within it
|
||||||
|
func RegisterFilters(dir string) {
|
||||||
|
files, err := ioutil.ReadDir(dir)
|
||||||
|
logger.Eexit(err, "could not read from template filters dir '%s'", dir)
|
||||||
|
for _, f := range files {
|
||||||
|
if !f.IsDir() {
|
||||||
|
switch path.Ext(f.Name()) {
|
||||||
|
case ".js":
|
||||||
|
fileBase := strings.TrimSuffix(f.Name(), ".js")
|
||||||
|
jsFile := dir + "/" + f.Name()
|
||||||
|
logger.D("trying to register filter from: %s", jsFile)
|
||||||
|
/*
|
||||||
|
jsStr, err := ioutil.ReadFile(jsFile)
|
||||||
|
if err != nil {
|
||||||
|
Log.Panicf("could not read '%s': %s", jsFile, err)
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
vm := motto.New()
|
||||||
|
fn, err := vm.Run(jsFile)
|
||||||
|
logger.Eexit(err, "error in javascript vm for '%s'", jsFile)
|
||||||
|
if !fn.IsFunction() {
|
||||||
|
logger.Exit("%s does not contain a function code", jsFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = pongo2.RegisterFilter(
|
||||||
|
fileBase,
|
||||||
|
func(in, param *pongo2.Value) (out *pongo2.Value, erro *pongo2.Error) {
|
||||||
|
thisObj, _ := vm.Object("({})")
|
||||||
|
var err error
|
||||||
|
if mark2web.CurrentContext != nil {
|
||||||
|
err = thisObj.Set("context", *mark2web.CurrentContext)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Perr(err, "could not set context in '%s': %s", jsFile)
|
||||||
|
ret, err := fn.Call(thisObj.Value(), in.Interface(), param.Interface())
|
||||||
|
logger.Eexit(err, "error in javascript file '%s' while calling returned function", jsFile)
|
||||||
|
|
||||||
|
retGo, err := ret.Export()
|
||||||
|
logger.Perr(err, "export error for '%s'", jsFile)
|
||||||
|
return pongo2.AsValue(retGo), nil
|
||||||
|
},
|
||||||
|
)
|
||||||
|
logger.Perr(err, "could not register filter from '%s'", jsFile)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
12
pkg/filter/dump.go
Normal file
12
pkg/filter/dump.go
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DumpFilter is a pongo2 filter, which returns a spew.Dump of the input
|
||||||
|
func DumpFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
||||||
|
dumpString := spew.Sdump(in.Interface())
|
||||||
|
return pongo2.AsValue(string(dumpString)), nil
|
||||||
|
}
|
24
pkg/filter/dump_test.go
Normal file
24
pkg/filter/dump_test.go
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
|
||||||
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDumpFilter(t *testing.T) {
|
||||||
|
Convey("set context", t, func() {
|
||||||
|
ctx := pongo2.Context{
|
||||||
|
"testvar": "test",
|
||||||
|
}
|
||||||
|
Convey("parse template", func() {
|
||||||
|
output, err := pongo2.RenderTemplateString("{{ testvar|safe|dump }}", ctx)
|
||||||
|
So(err, ShouldBeNil)
|
||||||
|
So(output, ShouldEqual, spew.Sdump("test"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
243
pkg/filter/image_process.go
Normal file
243
pkg/filter/image_process.go
Normal file
@ -0,0 +1,243 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"image"
|
||||||
|
"image/gif"
|
||||||
|
"image/jpeg"
|
||||||
|
"image/png"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/jobm"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/webrequest"
|
||||||
|
"github.com/disintegration/imaging"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
func parseImageParams(str string) (*mark2web.ImagingConfig, error) {
|
||||||
|
p := mark2web.ImagingConfig{}
|
||||||
|
if str == "" {
|
||||||
|
helper.Merge(&p, mark2web.CurrentTreeNode.Config.Imaging)
|
||||||
|
// Filename and Format are only valid for current image
|
||||||
|
p.Filename = ""
|
||||||
|
p.Format = ""
|
||||||
|
return &p, nil
|
||||||
|
}
|
||||||
|
for _, s := range strings.Split(str, ",") {
|
||||||
|
e := strings.Split(s, "=")
|
||||||
|
if len(e) < 2 {
|
||||||
|
return nil, fmt.Errorf("invalid image parameter: %s", s)
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
switch e[0] {
|
||||||
|
case "w":
|
||||||
|
p.Width, err = strconv.Atoi(e[1])
|
||||||
|
case "h":
|
||||||
|
p.Height, err = strconv.Atoi(e[1])
|
||||||
|
case "f":
|
||||||
|
p.Filename = e[1]
|
||||||
|
case "t":
|
||||||
|
p.TargetDir = e[1]
|
||||||
|
case "p":
|
||||||
|
p.Process = e[1]
|
||||||
|
case "a":
|
||||||
|
p.Anchor = e[1]
|
||||||
|
case "q":
|
||||||
|
p.Quality, err = strconv.Atoi(e[1])
|
||||||
|
if p.Quality < 0 || p.Quality > 100 {
|
||||||
|
err = errors.New("q= must be between 1 and 100")
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("invalid image parameter: %s", s)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("could not convert image parameter to correct value type for '%s': %s", s, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &p, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageProcessFilter read the image url and process parameters and saves
// the resized/processed image.
// param: w=WITDH,h=HEIGHT (full key list: see parseImageParams)
//
// The filter only computes the target filename and returns it (resolved
// relative to the current nav path); the actual image work is enqueued as
// a background job via jobm. If the target file already exists, no job is
// enqueued at all.
func ImageProcessFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
	imgSource := in.String()
	p, err := parseImageParams(param.String())
	if err != nil {
		return nil, &pongo2.Error{
			Sender:    "filter:image_resize",
			OrigError: err,
		}
	}
	if p == nil {
		return nil, &pongo2.Error{
			Sender:    "filter:image_resize",
			OrigError: errors.New("no imaging config defined"),
		}
	}

	var img image.Image
	// "resize" is the default process mode
	if p.Process == "" {
		p.Process = "resize"
	}
	// encode process mode and geometry into the filename so different
	// variants of the same source get distinct outputs
	filePrefix := fmt.Sprintf(
		"%s_%dx%d_q%03d",
		p.Process,
		p.Width,
		p.Height,
		p.Quality,
	)
	if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
		// build filename: URL basename plus an md5 of the full URL to
		// avoid collisions between equal basenames
		if p.Filename == "" {
			var fBase string
			if u, _ := url.Parse(imgSource); u != nil {
				fBase = path.Base(u.Path)
			}

			p.Filename = fmt.Sprintf(
				"%s_%x_%s",
				filePrefix,
				md5.Sum([]byte(imgSource)),
				fBase,
			)
		}
	} else {
		// local file: resolve against the current node's input dir
		imgSource = mark2web.CurrentTreeNode.ResolveInputPath(imgSource)
		if p.Filename == "" {
			p.Filename = fmt.Sprintf(
				"%s_%s",
				filePrefix,
				path.Base(imgSource),
			)
		}
	}

	var imgTarget string
	if p.TargetDir != "" {
		imgTarget = mark2web.CurrentTreeNode.ResolveOutputPath(
			path.Clean(p.TargetDir) + "/" +
				p.Filename,
		)

		// make sure the explicit target dir exists before the job runs
		pt := path.Dir(imgTarget)
		if _, err := os.Stat(pt); os.IsNotExist(err) {
			logger.I("create image target dir: %s", pt)
			if err := os.MkdirAll(pt, 0755); err != nil {
				return nil, &pongo2.Error{
					Sender:    "filter:image_resize",
					OrigError: fmt.Errorf("could not create image target dir '%s': %s", pt, err),
				}
			}
		}

		p.Filename = mark2web.CurrentTreeNode.ResolveNavPath(p.TargetDir + "/" + p.Filename)

	} else {
		imgTarget = mark2web.CurrentTreeNode.ResolveOutputPath(p.Filename)
	}

	if f, err := os.Stat(imgTarget); err == nil && !f.IsDir() {
		logger.N("skipped processing image from %s to %s, file already exists", imgSource, imgTarget)
	} else {
		// the closure captures p, imgSource and imgTarget; it runs after
		// this filter has returned and terminates the program via the
		// logger helpers on any processing error
		jobm.Enqueue(jobm.Job{
			Function: func() {
				logger.N("processing image from %s to %s", imgSource, imgTarget)
				if strings.HasPrefix(imgSource, "http://") || strings.HasPrefix(imgSource, "https://") {
					// remote file
					img, p.Format, err = webrequest.GetImage(imgSource)
				} else {
					img, err = imaging.Open(imgSource, imaging.AutoOrientation(true))
				}
				logger.Eexit(err, "filter:image_resize, could not open image '%s'", imgSource)

				switch p.Process {
				case "resize":
					img = imaging.Resize(img, p.Width, p.Height, imaging.Lanczos)
				case "fit":
					img = imaging.Fit(img, p.Width, p.Height, imaging.Lanczos)
				case "fill":
					// map the a= parameter onto an imaging anchor;
					// empty means center
					var anchor imaging.Anchor
					switch strings.ToLower(p.Anchor) {
					case "":
						fallthrough
					case "center":
						anchor = imaging.Center
					case "topleft":
						anchor = imaging.TopLeft
					case "top":
						anchor = imaging.Top
					case "topright":
						anchor = imaging.TopRight
					case "left":
						anchor = imaging.Left
					case "right":
						anchor = imaging.Right
					case "bottomleft":
						anchor = imaging.BottomLeft
					case "bottom":
						anchor = imaging.Bottom
					case "bottomright":
						anchor = imaging.BottomRight
					default:
						logger.Exit("filter:image_resize, unknown anchor a=%s definition", p.Anchor)
					}
					img = imaging.Fill(img, p.Width, p.Height, anchor, imaging.Lanczos)
				default:
					logger.Exit("filter:image_resize, invalid p parameter '%s'", p.Process)
				}

				if p.Format == "" {
					// no explicit format: let imaging pick the encoder
					// from the target extension
					switch strings.ToLower(path.Ext(imgTarget)) {
					case ".jpg", ".jpeg", ".gif", ".png":
						var encodeOptions = make([]imaging.EncodeOption, 0)
						if p.Quality > 0 {
							encodeOptions = append(encodeOptions, imaging.JPEGQuality(p.Quality))
						}

						err = imaging.Save(img, imgTarget, encodeOptions...)
						logger.Eerr(err, "filter:image_resize, could save image '%s'", imgTarget)
					default:
						logger.Exit("filter:image_resize, invalid filename extension for image: %s", imgTarget)
					}
				} else {
					// explicit format (e.g. reported by webrequest.GetImage):
					// encode manually with the matching stdlib encoder
					out, err := os.Create(imgTarget)
					logger.Eexit(err, "filter:image_resize, could not create image file '%s'", imgTarget)
					defer out.Close()

					switch p.Format {
					case "jpeg", "jpg":
						var jpegOpt *jpeg.Options
						if p.Quality > 0 {
							jpegOpt = &jpeg.Options{
								Quality: p.Quality,
							}
						}
						err = jpeg.Encode(out, img, jpegOpt)
					case "png":
						err = png.Encode(out, img)
					case "gif":
						err = gif.Encode(out, img, nil)
					default:
						logger.Exit("filter:image_resize, unknown format '%s' for '%s'", p.Format, imgSource)
					}
					logger.Eexit(err, "filter:image_resize, could not encode image file '%s'", imgTarget)
				}

				logger.N("finished image: %s", imgTarget)
			},
			Description: imgSource,
			Category:    "image process",
		})
	}
	return pongo2.AsValue(mark2web.CurrentTreeNode.ResolveNavPath(p.Filename)), nil
}
|
55
pkg/filter/image_process_test.go
Normal file
55
pkg/filter/image_process_test.go
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/jobm"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
|
||||||
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestImageProcessFilter renders the image_process filter against the
// fixtures in test/in and checks the generated output filenames.
// It relies on jobm running single-threaded so output files exist right
// after the template render returns.
func TestImageProcessFilter(t *testing.T) {
	Convey("set context", t, func() {
		ctx := pongo2.Context{
			"testlocal": "/img/test.jpg",
			"testurl":   "http://url",
		}

		// we want to check files after function calls, so no multithreading
		jobm.SetNumCPU(1)

		mark2web.Config.Directories.Input = "../../test/in"
		mark2web.Config.Directories.Output = "../../test/out"

		// minimal tree node with imaging defaults (fit 300x300, q=60)
		mark2web.CurrentTreeNode = &mark2web.TreeNode{
			InputPath:  "../../test/in/content",
			OutputPath: "../../test/out",
			Config: &mark2web.PathConfig{
				Imaging: &mark2web.ImagingConfig{
					Quality: 60,
					Height:  300,
					Width:   300,
					Process: "fit",
				},
			},
		}

		// remove output of a previous run so the filter really processes
		os.Remove("../../test/out/fit_300x300_q060_test.jpg")

		Convey("local image with defaults", func() {
			output, err := pongo2.RenderTemplateString("{{ testlocal|image_process }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, "fit_300x300_q060_test.jpg")

			Convey("local image with fit", func() {
				// explicit parameters incl. target dir t=/ yield a
				// root-absolute nav path
				output, err := pongo2.RenderTemplateString(`{{ testlocal|image_process:"p=fit,w=300,h=300,q=60,t=/" }}`, ctx)
				So(err, ShouldBeNil)
				So(output, ShouldEqual, "/fit_300x300_q060_test.jpg")
			})
		})
	})
}
|
29
pkg/filter/init.go
Normal file
29
pkg/filter/init.go
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
_ "github.com/flosch/pongo2-addons"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
err := pongo2.ReplaceFilter("markdown", MarkdownFilter)
|
||||||
|
// if err != nil {
|
||||||
|
// err = pongo2.RegisterFilter("markdown", MarkdownFilter)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
// }
|
||||||
|
|
||||||
|
newFilters := map[string]pongo2.FilterFunction{
|
||||||
|
"image_process": ImageProcessFilter,
|
||||||
|
"relative_path": RelativePathFilter,
|
||||||
|
"json": JSONFilter,
|
||||||
|
"dump": DumpFilter,
|
||||||
|
}
|
||||||
|
for name, fn := range newFilters {
|
||||||
|
err := pongo2.RegisterFilter(name, fn)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
35
pkg/filter/json.go
Normal file
35
pkg/filter/json.go
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// JSONFilter is a pongo2 filter, which returns a json string of the input
|
||||||
|
func JSONFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
||||||
|
pretty := false
|
||||||
|
for _, s := range strings.Split(param.String(), ",") {
|
||||||
|
switch s {
|
||||||
|
case "pretty":
|
||||||
|
pretty = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
var jsonBytes []byte
|
||||||
|
if pretty {
|
||||||
|
jsonBytes, err = json.MarshalIndent(in.Interface(), "", " ")
|
||||||
|
|
||||||
|
} else {
|
||||||
|
jsonBytes, err = json.Marshal(in.Interface())
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, &pongo2.Error{
|
||||||
|
Sender: "filter:json",
|
||||||
|
OrigError: err,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return pongo2.AsSafeValue(string(jsonBytes)), nil
|
||||||
|
}
|
50
pkg/filter/json_test.go
Normal file
50
pkg/filter/json_test.go
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
|
||||||
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestJSONFilter checks plain, pretty, null and error output of the
// json filter.
func TestJSONFilter(t *testing.T) {
	Convey("set context", t, func() {
		ctx := pongo2.Context{
			"teststr": "test",
			"testmap": map[string]interface{}{
				"float": 1.23,
				"int":   5,
				"str":   "test",
			},
			// +Inf cannot be marshalled to JSON and must produce an error
			"testerr": math.Inf(1),
		}
		Convey("parse template", func() {
			output, err := pongo2.RenderTemplateString("{{ teststr|safe|json }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, `"test"`)

			output, err = pongo2.RenderTemplateString("{{ testmap|safe|json }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, `{"float":1.23,"int":5,"str":"test"}`)

			output, err = pongo2.RenderTemplateString(`{{ testmap|safe|json:"pretty" }}`, ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, `{
 "float": 1.23,
 "int": 5,
 "str": "test"
}`)

			// unknown context variables marshal to JSON null
			output, err = pongo2.RenderTemplateString("{{ testnil|safe|json }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, `null`)

			output, err = pongo2.RenderTemplateString("{{ testerr|safe|json }}", ctx)
			So(err, ShouldNotBeNil)
			So(output, ShouldEqual, ``)

		})
	})
}
|
51
pkg/filter/markdown.go
Normal file
51
pkg/filter/markdown.go
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MarkdownFilter is a pongo2 filter, which converts markdown to html.
// Optional comma-separated filter parameters:
//
//	s=STYLE  enable chroma syntax highlighting with the given style name
//
// Any other key, a missing "=", or an empty style name yields a
// pongo2.Error.
func MarkdownFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
	chromaRenderer := false
	chromaStyle := ""
	if pStr := param.String(); pStr != "" {
		for _, s := range strings.Split(pStr, ",") {
			e := strings.Split(s, "=")
			if len(e) < 2 {
				return nil, &pongo2.Error{
					Sender:    "filter:markdown",
					OrigError: fmt.Errorf("invalid parameter: %s", s),
				}
			}
			switch e[0] {
			case "s":
				// a style name is mandatory when s= is given
				if e[1] == "" {
					return nil, &pongo2.Error{
						Sender:    "filter:markdown",
						OrigError: fmt.Errorf("need a syntax sheme name for parameter '%s='", e[0]),
					}
				}
				chromaRenderer = true
				chromaStyle = e[1]
			default:
				return nil, &pongo2.Error{
					Sender:    "filter:markdown",
					OrigError: fmt.Errorf("unknown parameter '%s='", e[0]),
				}
			}

		}
	}
	// the rendered HTML is marked safe so pongo2 does not escape it
	return pongo2.AsSafeValue(
			string(
				helper.RenderMarkdown(
					[]byte(in.String()),
					chromaRenderer,
					chromaStyle,
				))),
		nil
}
|
49
pkg/filter/markdown_test.go
Normal file
49
pkg/filter/markdown_test.go
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
|
||||||
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMarkdownFilter checks plain markdown rendering, fenced code blocks,
// chroma highlighting and all parameter error paths of the markdown filter.
func TestMarkdownFilter(t *testing.T) {
	Convey("set context", t, func() {
		ctx := pongo2.Context{
			"testvar":  "# test",
			"testcode": "```sh\ntest=test\n```",
		}
		Convey("parse template", func() {
			output, err := pongo2.RenderTemplateString("{{ testvar|markdown }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, "<h1>test</h1>\n")

			output, err = pongo2.RenderTemplateString("{{ testcode|markdown }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, `<pre><code class="language-sh">test=test
</code></pre>
`)

			// chroma markup is style/version dependent, so only the
			// absence of an error is asserted here
			_, err = pongo2.RenderTemplateString(`{{ testcode|markdown:"s=monokai" }}`, ctx)
			So(err, ShouldBeNil)
			// So(output, ShouldEqual, `<pre tabindex="0" style="color:#f8f8f2;background-color:#272822;"><code><span style="display:flex;"><span>test<span style="color:#f92672">=</span>test
			// </span></span></code></pre>`)

			// empty style name must fail
			output, err = pongo2.RenderTemplateString(`{{ testcode|markdown:"s=" }}`, ctx)
			So(output, ShouldBeEmpty)
			So(err, ShouldNotBeNil)
			So(err.Error(), ShouldContainSubstring, "need a syntax sheme name for parameter")

			// unknown parameter key must fail
			output, err = pongo2.RenderTemplateString(`{{ testcode|markdown:"test=test" }}`, ctx)
			So(output, ShouldBeEmpty)
			So(err, ShouldNotBeNil)
			So(err.Error(), ShouldContainSubstring, "unknown parameter")

			// parameter without "=" must fail
			output, err = pongo2.RenderTemplateString(`{{ testcode|markdown:"s=monokai,test" }}`, ctx)
			So(output, ShouldBeEmpty)
			So(err, ShouldNotBeNil)
			So(err.Error(), ShouldContainSubstring, "invalid parameter: test")
		})
	})
}
|
15
pkg/filter/relative_path.go
Normal file
15
pkg/filter/relative_path.go
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RelativePathFilter returns the relative path to navpoint based on current nav
|
||||||
|
func RelativePathFilter(in, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
|
||||||
|
return pongo2.AsValue(
|
||||||
|
mark2web.CurrentTreeNode.ResolveNavPath(
|
||||||
|
in.String(),
|
||||||
|
),
|
||||||
|
), nil
|
||||||
|
}
|
53
pkg/filter/relative_path_test.go
Normal file
53
pkg/filter/relative_path_test.go
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
package filter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/mark2web"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
|
||||||
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestRelativePathFilter checks resolution of relative and absolute nav
// paths against a tree node one level below the output root.
func TestRelativePathFilter(t *testing.T) {
	Convey("set context", t, func() {
		ctx := pongo2.Context{
			"testrel": "rel",
			"testabs": "/abs",
			"testsub": "../sub/rel",
		}

		mark2web.Config.Directories.Output = "../../test/out"

		// node located in the "sub" subdirectory of the output root
		mark2web.CurrentTreeNode = &mark2web.TreeNode{
			InputPath:  "../../test/in/content",
			OutputPath: "../../test/out/sub",
			Config: &mark2web.PathConfig{
				Imaging: &mark2web.ImagingConfig{
					Quality: 60,
					Height:  300,
					Width:   300,
					Process: "fit",
				},
			},
		}

		Convey("parse template", func() {

			// relative paths pass through unchanged
			output, err := pongo2.RenderTemplateString("{{ testrel|relative_path }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, "rel")

			// absolute paths are rewritten relative to the current node
			output, err = pongo2.RenderTemplateString("{{ testabs|relative_path }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, "../abs")

			/* TODO
			output, err = pongo2.RenderTemplateString("{{ testsub|relative_path }}", ctx)
			So(err, ShouldBeNil)
			So(output, ShouldEqual, "rel")
			*/
		})
	})
}
|
27
pkg/helper/dir.go
Normal file
27
pkg/helper/dir.go
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CreateDirectory creates direcory with all missing parents and panic if error
|
||||||
|
func CreateDirectory(dir string) {
|
||||||
|
logger.D("trying to create output directory: %s", dir)
|
||||||
|
|
||||||
|
if dirH, err := os.Stat(dir); os.IsNotExist(err) {
|
||||||
|
err := os.MkdirAll(dir, 0755)
|
||||||
|
logger.Eexit(err, "could not create output directory '%s'", dir)
|
||||||
|
|
||||||
|
logger.I("created output directory: %s", dir)
|
||||||
|
} else if dirH != nil {
|
||||||
|
if dirH.IsDir() {
|
||||||
|
logger.I("output directory '%s' already exists", dir)
|
||||||
|
} else {
|
||||||
|
logger.Exit("output directory '%s' is no directory", dir)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.Perr(err, "unknown error for output directory '%s'", dir)
|
||||||
|
}
|
||||||
|
}
|
47
pkg/helper/map_string.go
Normal file
47
pkg/helper/map_string.go
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
|
// MapString is a map[string]interface{} which always unmarshals yaml
// maps to map[string]interface{} (instead of map[interface{}]interface{}).
type MapString map[string]interface{}

// UnmarshalYAML handles all maps as map[string]interface{} for later JSON
// serialization.
// see https://github.com/elastic/beats/blob/6435194af9f42cbf778ca0a1a92276caf41a0da8/libbeat/common/mapstr.go
func (ms *MapString) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var raw map[interface{}]interface{}
	if err := unmarshal(&raw); err != nil {
		return err
	}
	*ms = cleanUpInterfaceMap(raw)
	return nil
}

// cleanUpInterfaceArray converts every element of in recursively.
func cleanUpInterfaceArray(in []interface{}) []interface{} {
	out := make([]interface{}, len(in))
	for idx, elem := range in {
		out[idx] = cleanUpMapValue(elem)
	}
	return out
}

// cleanUpInterfaceMap stringifies all keys and converts all values
// recursively.
func cleanUpInterfaceMap(in map[interface{}]interface{}) MapString {
	out := make(MapString)
	for key, val := range in {
		out[fmt.Sprintf("%v", key)] = cleanUpMapValue(val)
	}
	return out
}

// cleanUpMapValue converts a single value: arrays and maps recurse,
// scalars pass through, anything else is stringified.
func cleanUpMapValue(v interface{}) interface{} {
	switch val := v.(type) {
	case []interface{}:
		return cleanUpInterfaceArray(val)
	case map[interface{}]interface{}:
		return cleanUpInterfaceMap(val)
	case string, bool, int, int8, int16, int32, int64, float32, float64:
		return val
	default:
		return fmt.Sprintf("%v", val)
	}
}
|
30
pkg/helper/markdown.go
Normal file
30
pkg/helper/markdown.go
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
|
||||||
|
"github.com/Depado/bfchroma"
|
||||||
|
"github.com/russross/blackfriday/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RenderMarkdown renders input to html with chroma syntax highlighting if wanted.
// chromaStyle defaults to "monokai" and only takes effect when
// chromaRenderer is true.
func RenderMarkdown(input []byte, chromaRenderer bool, chromaStyle string) []byte {
	var options []blackfriday.Option

	if chromaStyle == "" {
		chromaStyle = "monokai"
	}
	if chromaRenderer {
		options = []blackfriday.Option{
			blackfriday.WithRenderer(
				bfchroma.NewRenderer(
					bfchroma.Style(chromaStyle),
				),
			),
		}
	}

	// fix \r from markdown for blackfriday
	input = bytes.Replace(input, []byte("\r"), []byte(""), -1)
	return blackfriday.Run(input, options...)
}
|
28
pkg/helper/merge.go
Normal file
28
pkg/helper/merge.go
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"github.com/imdario/mergo"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ptrTransformer is a mergo transformer that fills nil destination
// pointers with the source pointer. Note this is a shallow copy:
// afterwards both sides share the pointed-to value.
type ptrTransformer struct{}

// Transformer returns a merge function for pointer types and nil for
// everything else (falling back to mergo's default behavior).
func (t ptrTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
	if typ.Kind() == reflect.Ptr {
		return func(dst, src reflect.Value) error {
			if dst.CanSet() {
				// only fill pointers that are still unset
				if dst.IsNil() {
					dst.Set(src)
				}
			}
			return nil
		}
	}
	return nil
}

// Merge merges 2 objects or maps: values from src fill unset fields in
// dst (dst must be a pointer for mergo to work).
func Merge(dst, src interface{}) error {
	return mergo.Merge(dst, src, mergo.WithTransformers(ptrTransformer{}))
}
|
18
pkg/helper/regexp.go
Normal file
18
pkg/helper/regexp.go
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
import "regexp"
|
||||||
|
|
||||||
|
// GetRegexpParams gets a map of named regexp group matches
|
||||||
|
// use pe. (?P<Year>\d{4})-(?P<Month>\d{2})-(?P<Day>\d{2}) as regexp
|
||||||
|
func GetRegexpParams(regEx *regexp.Regexp, str string) (paramsMap map[string]string) {
|
||||||
|
|
||||||
|
match := regEx.FindStringSubmatch(str)
|
||||||
|
|
||||||
|
paramsMap = make(map[string]string)
|
||||||
|
for i, name := range regEx.SubexpNames() {
|
||||||
|
if i > 0 && i <= len(match) {
|
||||||
|
paramsMap[name] = match[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
16
pkg/helper/string.go
Normal file
16
pkg/helper/string.go
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
// ShortenStringLeft shortens a string to at most num characters, keeping
// the right-hand end and prefixing it with "..." when it was truncated.
// num <= 4 always yields "" because there is no room for the ellipsis
// plus at least one character.
// NOTE(review): operates on bytes, not runes — multi-byte UTF-8 input may
// be cut inside a character; confirm callers only pass ASCII-ish paths.
func ShortenStringLeft(str string, num int) string {
	if num <= 4 {
		return ""
	}
	if len(str) <= num {
		return str
	}
	// reserve 3 characters for the "..." prefix
	// (the previous inner `if num > 3` guard was dead code: num >= 5 here)
	keep := num - 3
	return "..." + str[len(str)-keep:]
}
|
61
pkg/jobm/jobmanager.go
Normal file
61
pkg/jobm/jobmanager.go
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
package jobm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"runtime"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/progress"
|
||||||
|
)
|
||||||
|
|
||||||
|
// wg tracks the worker goroutines started in init so Wait can block
// until they have finished.
var wg sync.WaitGroup

// numCPU is the number of worker goroutines to start (defaults to the
// number of CPU cores; see SetNumCPU).
var numCPU = runtime.NumCPU()

// Job is a wrapper to describe a Job function
type Job struct {
	Function    func()an
	Description string
	Category    string
}

// jobChan delivers batches of jobs to the workers; closed by Wait.
var jobChan = make(chan []Job)

// worker consumes job batches from jobChan until it is closed, reporting
// progress before and after each job.
func worker(jobChan <-chan []Job) {
	defer wg.Done()

	for jobs := range jobChan {
		for _, job := range jobs {
			progress.DescribeCurrent(job.Category, job.Description)
			job.Function()
			progress.IncrDone(job.Category)
		}
	}
}
|
||||||
|
|
||||||
|
// init starts the worker pool at package load time.
func init() {
	//logger.I("number of CPU core: %d", numCPU)
	// NOTE(review): a previous comment said "one core for main thread",
	// but the loop starts one worker per CPU core; the main goroutine
	// shares those cores — confirm this is intended.
	for i := 0; i < numCPU; i++ {
		wg.Add(1)
		go worker(jobChan)
	}
}
|
||||||
|
|
||||||
|
// Enqueue enqueues a job to the job queue.
// All jobs of one call are counted in the progress totals first; the
// whole batch then goes to a single worker (blocks until one is free).
func Enqueue(jobs ...Job) {
	for _, job := range jobs {
		progress.IncrTotal(job.Category)
	}
	jobChan <- jobs
}
|
||||||
|
|
||||||
|
// Wait will wait for all jobs to finish
|
||||||
|
func Wait() {
|
||||||
|
close(jobChan)
|
||||||
|
progress.Stop()
|
||||||
|
wg.Wait()
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNumCPU is for testing package without threading.
// NOTE(review): init() has already started runtime.NumCPU() workers by
// the time this runs; changing numCPU afterwards does not stop them —
// confirm this actually yields single-threaded behavior in tests.
func SetNumCPU(i int) {
	numCPU = i
}
|
138
pkg/logger/logger.go
Normal file
138
pkg/logger/logger.go
Normal file
@ -0,0 +1,138 @@
|
|||||||
|
package logger
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
"github.com/op/go-logging"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Log is global logger
var Log = logging.MustGetLogger("myLogger")

// logBackendLeveled is the leveled backend installed by configureLogger.
var logBackendLeveled logging.LeveledBackend

// Prefix is prepended to every message logged through the shorthand helpers.
var Prefix = ""
|
||||||
|
|
||||||
|
// SetLogLevel sets log level for global logger (debug, info, notice, warning, error)
|
||||||
|
func SetLogLevel(level string) {
|
||||||
|
logBackendLevel := logging.INFO
|
||||||
|
switch level {
|
||||||
|
case "debug":
|
||||||
|
logBackendLevel = logging.DEBUG
|
||||||
|
break
|
||||||
|
|
||||||
|
case "info":
|
||||||
|
logBackendLevel = logging.INFO
|
||||||
|
break
|
||||||
|
|
||||||
|
case "notice":
|
||||||
|
logBackendLevel = logging.NOTICE
|
||||||
|
break
|
||||||
|
|
||||||
|
case "warning":
|
||||||
|
logBackendLevel = logging.WARNING
|
||||||
|
break
|
||||||
|
|
||||||
|
case "error":
|
||||||
|
logBackendLevel = logging.ERROR
|
||||||
|
break
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
logBackendLeveled.SetLevel(logBackendLevel, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
// configureLogger sets logger backend and level: colored, timestamped
// output to stderr with default level NOTICE.
func configureLogger() {
	logBackend := logging.NewLogBackend(os.Stderr, "", 0)
	logBackendFormatter := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(
		`%{color}%{time:2006-01-02 15:04:05.000} ▶ %{level:.4s} %{id:03x}%{color:reset} %{message}`,
	))
	logBackendLeveled = logging.AddModuleLevel(logBackendFormatter)
	logBackendLeveled.SetLevel(logging.NOTICE, "")
	logging.SetBackend(logBackendLeveled)
}
|
||||||
|
|
||||||
|
// init configures spew (used for dump-style output) to a compact
// representation and installs the logging backend.
func init() {
	spew.Config.DisablePointerAddresses = true
	spew.Config.DisableCapacities = true
	spew.Config.DisableMethods = true
	spew.Config.DisablePointerMethods = true
	configureLogger()
}
|
||||||
|
|
||||||
|
// prefix returns the global Prefix prepended to every shorthand message.
func prefix() string {
	return Prefix
}

// D is shorthand for Debugf
func D(format string, args ...interface{}) {
	Log.Debugf(prefix()+format, args...)
}

// I is shorthand for Infof
func I(format string, args ...interface{}) {
	Log.Infof(prefix()+format, args...)
}

// N is shorthand for Noticef
func N(format string, args ...interface{}) {
	Log.Noticef(prefix()+format, args...)
}

// W is shorthand for Warningf
func W(format string, args ...interface{}) {
	Log.Warningf(prefix()+format, args...)
}

// E is shorthand for Errorf
func E(format string, args ...interface{}) {
	Log.Errorf(prefix()+format, args...)
}

// P is shorthand for Panicf
func P(format string, args ...interface{}) {
	Log.Panicf(prefix()+format, args...)
}

// Eerr is shorthand for
// if err != nil {
//   Log.Errorf(...)
// }
// The error is appended to args and rendered via a trailing
// " (Error: %s)" added to the format string.
func Eerr(err error, format string, args ...interface{}) {
	if err != nil {
		args = append(args, err)
		Log.Errorf(prefix()+format+" (Error: %s)", args...)
	}
}

// Eexit is shorthand for
// if err != nil {
//   Log.Errorf(...)
//   os.Exit(1)
// }
func Eexit(err error, format string, args ...interface{}) {
	Eerr(err, format, args...)
	if err != nil {
		os.Exit(1)
	}
}

// Exit is shorthand for
// Log.Errorf(...)
// os.Exit(1)
// It always terminates the program.
func Exit(format string, args ...interface{}) {
	E(format, args...)
	os.Exit(1)
}

// Perr is shorthand for
// if err != nil {
//   Log.Panicf(...)
// }
// Like Eerr, the error is appended via a trailing " (Error: %s)".
func Perr(err error, format string, args ...interface{}) {
	if err != nil {
		args = append(args, err)
		Log.Panicf(prefix()+format+" (Error: %s)", args...)
	}
}
|
53
pkg/mark2web/assets.go
Normal file
53
pkg/mark2web/assets.go
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
cpy "github.com/otiai10/copy"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ProcessAssets copies the assets from input to output dir.
// Only the "copy" action is implemented; from/to paths that do not start
// with "/" are resolved against the configured input/output directories.
// Copy errors panic via logger.Perr; when Assets.Compress is set the
// copied tree is post-processed by compressFilesInDir.
func ProcessAssets() {
	switch Config.Assets.Action {
	case "copy":
		from := Config.Assets.FromPath
		to := Config.Assets.ToPath
		if !strings.HasPrefix(from, "/") {
			from = Config.Directories.Input + "/" + from
		}
		if !strings.HasPrefix(to, "/") {
			to = Config.Directories.Output + "/" + to
		}
		logger.N("copying assets from '%s' to '%s'", from, to)
		err := cpy.Copy(from, to)

		logger.Perr(err, "could not copy assets from '%s' to '%s'", from, to)

		if Config.Assets.Compress {
			compressFilesInDir(to)
		}
	}
}
||||||
|
|
||||||
|
// fixAssetsPath replaces assets path based on current path.
// It applies the configured FixTemplate.Find regexp to str and
// substitutes FixTemplate.Replace, prefixed with the relative way back
// to the site root plus the assets target path. str is returned
// unchanged when no Find pattern is configured; an invalid regexp
// terminates the program (logger.Eexit).
func (node *TreeNode) fixAssetsPath(str string) string {
	if find := Config.Assets.FixTemplate.Find; find != "" {
		logger.D("fixing assets paths for path '%s'", node.CurrentNavPath())
		repl := Config.Assets.FixTemplate.Replace
		toPath := Config.Assets.ToPath

		bToRoot := node.BackToRootPath()
		regex, err := regexp.Compile(find)
		logger.Eexit(err, "could not compile regexp '%s' for assets path", find)

		// build "<back-to-root>/<assets-to-path>/<replace>/" and normalize it
		repl = bToRoot + toPath + "/" + repl
		repl = path.Clean(repl) + "/"
		logger.D("new assets paths: %s", repl)
		return regex.ReplaceAllString(str, repl)
	}

	return str
}
39
pkg/mark2web/brotli.go
Normal file
39
pkg/mark2web/brotli.go
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"github.com/andybalholm/brotli"
|
||||||
|
)
|
||||||
|
|
||||||
|
var brotliSupported = true
|
||||||
|
|
||||||
|
func handleBrotliCompression(filename string, content []byte) {
|
||||||
|
brFilename := filename + ".br"
|
||||||
|
|
||||||
|
logger.I("writing to compressed output file: %s", brFilename)
|
||||||
|
|
||||||
|
f, err := os.Create(brFilename)
|
||||||
|
logger.Eexit(err, "could not create file '%s'", brFilename)
|
||||||
|
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
bw := brotli.NewWriterLevel(f, brotli.BestCompression)
|
||||||
|
defer bw.Close()
|
||||||
|
|
||||||
|
if content != nil {
|
||||||
|
// content given
|
||||||
|
_, err = bw.Write(content)
|
||||||
|
logger.Eexit(err, "could not write brotli content for '%s'", filename)
|
||||||
|
} else {
|
||||||
|
// read file
|
||||||
|
r, err := os.Open(filename)
|
||||||
|
logger.Eexit(err, "could not open file '%s'", filename)
|
||||||
|
defer r.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(bw, r)
|
||||||
|
logger.Eexit(err, "could not write brotli file for '%s'", filename)
|
||||||
|
}
|
||||||
|
}
|
196
pkg/mark2web/collection.go
Normal file
196
pkg/mark2web/collection.go
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/webrequest"
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// colCacheEntry caches the decoded payload of a remote collection URL
// so repeated requests for the same URL within one run are served
// from memory.
type colCacheEntry struct {
	data     interface{} // decoded payload as returned by webrequest.GetJSON
	hit      int         // number of cache hits after the initial fetch
	navnames []string    // navnames generated from this entry, used for recursion-loop detection
}

// colCache maps a collection URL to its cached entry.
// NOTE(review): only URL-based collections are cached; directory-based
// collections never create an entry here.
var colCache = make(map[string]*colCacheEntry)
|
||||||
|
|
||||||
|
func (node *TreeNode) handleCollections() {
|
||||||
|
collections := append(node.Config.Collections, node.Config.This.Collections...)
|
||||||
|
for _, colConfig := range collections {
|
||||||
|
if colConfig.Name == nil || *colConfig.Name == "" {
|
||||||
|
logger.Exit("missing Name in collection config in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
if (colConfig.URL == nil || *colConfig.URL == "") &&
|
||||||
|
(colConfig.Directory == nil) {
|
||||||
|
logger.Exit("missing URL and Directory in collection config in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
if node.ColMap == nil {
|
||||||
|
node.ColMap = make(helper.MapString)
|
||||||
|
}
|
||||||
|
ctx := NewContext()
|
||||||
|
ctx["This"] = node.Config.This
|
||||||
|
ctx["Data"] = node.Config.Data
|
||||||
|
|
||||||
|
var colData interface{}
|
||||||
|
|
||||||
|
errSrcText := ""
|
||||||
|
cacheKey := ""
|
||||||
|
|
||||||
|
if colConfig.URL != nil {
|
||||||
|
url, err := pongo2.RenderTemplateString(*colConfig.URL, ctx)
|
||||||
|
logger.Eexit(err, "invalid template string for Collection Element.URL in '%s'", node.InputPath)
|
||||||
|
|
||||||
|
errSrcText = "URL " + url
|
||||||
|
cacheKey = url
|
||||||
|
|
||||||
|
if cacheEntry, ok := colCache[url]; ok {
|
||||||
|
colData = cacheEntry.data
|
||||||
|
cacheEntry.hit++
|
||||||
|
} else {
|
||||||
|
logger.N("reading collection from: %s", errSrcText)
|
||||||
|
colData = webrequest.GetJSON(url)
|
||||||
|
colCache[url] = &colCacheEntry{
|
||||||
|
data: colData,
|
||||||
|
navnames: make([]string, 0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
path := node.ResolveInputPath(colConfig.Directory.Path)
|
||||||
|
errSrcText = "DIR " + path
|
||||||
|
|
||||||
|
logger.N("reading collection from: %s", errSrcText)
|
||||||
|
d, err := ioutil.ReadDir(path)
|
||||||
|
logger.Eexit(err, "could not read directory '%s'", path)
|
||||||
|
|
||||||
|
mStr := "."
|
||||||
|
if colConfig.Directory.MatchFilename != "" {
|
||||||
|
mStr = colConfig.Directory.MatchFilename
|
||||||
|
}
|
||||||
|
matcher, err := regexp.Compile(mStr)
|
||||||
|
logger.Eexit(err, "could not compile regex for MatchFilename '%s' in '%s'", mStr, path)
|
||||||
|
|
||||||
|
if colConfig.Directory.ReverseOrder {
|
||||||
|
for i := len(d)/2 - 1; i >= 0; i-- {
|
||||||
|
opp := len(d) - 1 - i
|
||||||
|
d[i], d[opp] = d[opp], d[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fcolData := make([]pongo2.Context, 0)
|
||||||
|
for _, fh := range d {
|
||||||
|
if !fh.IsDir() && matcher.MatchString(fh.Name()) {
|
||||||
|
inFile := path + "/" + fh.Name()
|
||||||
|
md, err := ioutil.ReadFile(inFile)
|
||||||
|
logger.Eexit(err, "could not read file '%s'", inFile)
|
||||||
|
|
||||||
|
_, ctx := node.processMarkdownWithHeader(md, inFile)
|
||||||
|
(*ctx)["FilenameMatch"] = helper.GetRegexpParams(matcher, fh.Name())
|
||||||
|
fcolData = append(fcolData, *ctx)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
colData = fcolData
|
||||||
|
}
|
||||||
|
|
||||||
|
node.ColMap[*colConfig.Name] = colData
|
||||||
|
|
||||||
|
if navT := colConfig.NavTemplate; navT != nil {
|
||||||
|
var entries []interface{}
|
||||||
|
var ok bool
|
||||||
|
if navT.EntriesAttribute != "" {
|
||||||
|
var colDataMap map[string]interface{}
|
||||||
|
if colDataMap, ok = colData.(map[string]interface{}); ok {
|
||||||
|
entries, ok = colDataMap[navT.EntriesAttribute].([]interface{})
|
||||||
|
if !ok {
|
||||||
|
logger.D(spew.Sdump(colDataMap))
|
||||||
|
logger.Exit("invalid json data in [%s] from '%s' for entries", navT.EntriesAttribute, errSrcText)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
entries, ok = colData.([]interface{})
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
logger.D(spew.Sdump(colData))
|
||||||
|
logger.Exit("invalid json data from '%s', need array of objects for entries or object with configured NavTemplate.EntriesAttribute", errSrcText)
|
||||||
|
}
|
||||||
|
|
||||||
|
// build navigation with detail sites
|
||||||
|
for idx, colEl := range entries {
|
||||||
|
ctxE := make(pongo2.Context)
|
||||||
|
err := helper.Merge(&ctxE, ctx)
|
||||||
|
logger.Eexit(err, "could not merge context in '%s'", node.InputPath)
|
||||||
|
|
||||||
|
var jsonCtx map[string]interface{}
|
||||||
|
if jsonCtx, ok = colEl.(map[string]interface{}); !ok {
|
||||||
|
logger.D(spew.Sdump(colEl))
|
||||||
|
logger.Exit("no json object for entry index %d from '%s'", idx, errSrcText)
|
||||||
|
}
|
||||||
|
err = helper.Merge(&ctxE, pongo2.Context(jsonCtx))
|
||||||
|
logger.Eexit(err, "could not merge context in '%s'", node.InputPath)
|
||||||
|
|
||||||
|
tpl := ""
|
||||||
|
if navT.Template != "" {
|
||||||
|
tpl, err = pongo2.RenderTemplateString(navT.Template, ctxE)
|
||||||
|
logger.Eexit(err, "invalid template string for NavTemplate.Template in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
if tpl == "" {
|
||||||
|
tpl = *node.Config.Template
|
||||||
|
}
|
||||||
|
|
||||||
|
dataKey := ""
|
||||||
|
if navT.DataKey != "" {
|
||||||
|
dataKey, err = pongo2.RenderTemplateString(navT.DataKey, ctxE)
|
||||||
|
logger.Eexit(err, "invalid template string for NavTemplate.DataKey in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
goTo, err := pongo2.RenderTemplateString(navT.GoTo, ctxE)
|
||||||
|
logger.Eexit(err, "invalid template string for NavTemplate.GoTo in '%s'", node.InputPath)
|
||||||
|
|
||||||
|
goTo = strings.Trim(goTo, "/")
|
||||||
|
goTo = path.Clean(goTo)
|
||||||
|
|
||||||
|
if strings.Contains(goTo, "..") {
|
||||||
|
logger.Exit("going back via .. in NavTemplate.GoTo forbidden in collection config in '%s': %s", node.InputPath, goTo)
|
||||||
|
}
|
||||||
|
if goTo == "." {
|
||||||
|
logger.Exit("invalid config '.' for NavTemplate.GoTo in collection config in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
if goTo == "" {
|
||||||
|
logger.Exit("missing NavTemplate.GoTo in collection config in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
navname := ""
|
||||||
|
if navT.Navname != "" {
|
||||||
|
navname, err = pongo2.RenderTemplateString(navT.Navname, ctxE)
|
||||||
|
logger.Eexit(err, "invalid template string for NavTemplate.Navname in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
body := ""
|
||||||
|
if navT.Body != "" {
|
||||||
|
body, err = pongo2.RenderTemplateString(navT.Body, ctxE)
|
||||||
|
logger.Eexit(err, "invalid template string for NavTemplate.Body in '%s'", node.InputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
if l := len(colCache[cacheKey].navnames); colCache[cacheKey].hit > 1 &&
|
||||||
|
l > 0 &&
|
||||||
|
navname == colCache[cacheKey].navnames[l-1] {
|
||||||
|
// navname before used same url, so recursion loop
|
||||||
|
logger.Exit("collection request loop detected for in '%s' for : %s", node.InputPath, errSrcText)
|
||||||
|
}
|
||||||
|
|
||||||
|
colCache[cacheKey].navnames = append(colCache[cacheKey].navnames, navname)
|
||||||
|
|
||||||
|
node.addSubNode(tpl, goTo, navname, colEl, dataKey, body, navT.Hidden)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
78
pkg/mark2web/compress.go
Normal file
78
pkg/mark2web/compress.go
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"compress/gzip"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/jobm"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
func handleCompression(filename string, content []byte) {
|
||||||
|
jobm.Enqueue(jobm.Job{
|
||||||
|
Function: func() {
|
||||||
|
if _, ok := Config.Compress.Extensions[path.Ext(filename)]; ok {
|
||||||
|
|
||||||
|
if Config.Compress.Brotli {
|
||||||
|
handleBrotliCompression(filename, content)
|
||||||
|
}
|
||||||
|
|
||||||
|
if Config.Compress.GZIP {
|
||||||
|
gzFilename := filename + ".gz"
|
||||||
|
|
||||||
|
logger.I("writing to compressed output file: %s", gzFilename)
|
||||||
|
|
||||||
|
f, err := os.Create(gzFilename)
|
||||||
|
logger.Eexit(err, "could not create file '%s'", gzFilename)
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
zw, err := gzip.NewWriterLevel(f, gzip.BestCompression)
|
||||||
|
logger.Eexit(err, "could not initialize gzip writer for '%s'", filename)
|
||||||
|
defer zw.Close()
|
||||||
|
|
||||||
|
if content != nil {
|
||||||
|
// content given
|
||||||
|
_, err = zw.Write(content)
|
||||||
|
logger.Eexit(err, "could not write gziped content for '%s'", filename)
|
||||||
|
} else {
|
||||||
|
// read file
|
||||||
|
r, err := os.Open(filename)
|
||||||
|
logger.Eexit(err, "could not open file '%s'", filename)
|
||||||
|
defer r.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(zw, r)
|
||||||
|
logger.Eexit(err, "could not gzip file '%s'", filename)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Description: filename,
|
||||||
|
Category: "compress",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func compressFilesInDir(dir string) {
|
||||||
|
logger.N("compressing configured files in: %s", dir)
|
||||||
|
|
||||||
|
var _processDir func(string)
|
||||||
|
_processDir = func(d string) {
|
||||||
|
entries, err := ioutil.ReadDir(d)
|
||||||
|
logger.Eexit(err, "could not read dir '%s'", d)
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
_processDir(d + "/" + entry.Name())
|
||||||
|
} else {
|
||||||
|
handleCompression(d+"/"+entry.Name(), nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_processDir(dir)
|
||||||
|
|
||||||
|
}
|
@ -1,4 +1,4 @@
|
|||||||
package config
|
package mark2web
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
@ -13,6 +13,7 @@ type GlobalConfig struct {
|
|||||||
} `yaml:"Webserver"`
|
} `yaml:"Webserver"`
|
||||||
|
|
||||||
Assets struct {
|
Assets struct {
|
||||||
|
Compress bool `yaml:"Compress"`
|
||||||
FromPath string `yaml:"FromPath"`
|
FromPath string `yaml:"FromPath"`
|
||||||
ToPath string `yaml:"ToPath"`
|
ToPath string `yaml:"ToPath"`
|
||||||
Action string `yaml:"Action"`
|
Action string `yaml:"Action"`
|
||||||
@ -26,20 +27,28 @@ type GlobalConfig struct {
|
|||||||
Action string `yaml:"Action"`
|
Action string `yaml:"Action"`
|
||||||
} `yaml:"OtherFiles"`
|
} `yaml:"OtherFiles"`
|
||||||
|
|
||||||
|
Compress struct {
|
||||||
|
Brotli bool `yaml:"Brotli"`
|
||||||
|
GZIP bool `yaml:"GZIP"`
|
||||||
|
Extensions map[string]string `yaml:"Extensions"`
|
||||||
|
} `yaml:"Compress"`
|
||||||
|
|
||||||
Directories struct {
|
Directories struct {
|
||||||
Input string
|
Input string
|
||||||
Output string
|
Output string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Config is global config
|
||||||
var Config = new(GlobalConfig)
|
var Config = new(GlobalConfig)
|
||||||
|
|
||||||
func ReadGlobalConfig(filename string) error {
|
// ReadFromFile reads yaml config from file
|
||||||
|
func (c *GlobalConfig) ReadFromFile(filename string) error {
|
||||||
data, err := ioutil.ReadFile(filename)
|
data, err := ioutil.ReadFile(filename)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
err = yaml.Unmarshal(data, Config)
|
err = yaml.Unmarshal(data, c)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
@ -1,60 +1,19 @@
|
|||||||
package config
|
package mark2web
|
||||||
|
|
||||||
import (
|
import "gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
"fmt"
|
|
||||||
"reflect"
|
|
||||||
|
|
||||||
"github.com/imdario/mergo"
|
// CollectionDirectoryConfig specifies how to handle a directory of markdown files as a collection
|
||||||
)
|
type CollectionDirectoryConfig struct {
|
||||||
|
Path string `yaml:"Path"`
|
||||||
// MapString is a map[string]interface{} which always unmarsahls yaml to map[string]interface{}
|
MatchFilename string `yaml:"MatchFilename"`
|
||||||
type MapString map[string]interface{}
|
ReverseOrder bool `yaml:"ReverseOrder"`
|
||||||
|
|
||||||
// UnmarshalYAML handles all maps as map[string]interface{} for later JSON
|
|
||||||
// see https://github.com/elastic/beats/blob/6435194af9f42cbf778ca0a1a92276caf41a0da8/libbeat/common/mapstr.go
|
|
||||||
func (ms *MapString) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var result map[interface{}]interface{}
|
|
||||||
err := unmarshal(&result)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
*ms = cleanUpInterfaceMap(result)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func cleanUpInterfaceArray(in []interface{}) []interface{} {
|
|
||||||
result := make([]interface{}, len(in))
|
|
||||||
for i, v := range in {
|
|
||||||
result[i] = cleanUpMapValue(v)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func cleanUpInterfaceMap(in map[interface{}]interface{}) MapString {
|
|
||||||
result := make(MapString)
|
|
||||||
for k, v := range in {
|
|
||||||
result[fmt.Sprintf("%v", k)] = cleanUpMapValue(v)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func cleanUpMapValue(v interface{}) interface{} {
|
|
||||||
switch v := v.(type) {
|
|
||||||
case []interface{}:
|
|
||||||
return cleanUpInterfaceArray(v)
|
|
||||||
case map[interface{}]interface{}:
|
|
||||||
return cleanUpInterfaceMap(v)
|
|
||||||
case string, bool, int, int8, int16, int32, int64, float32, float64:
|
|
||||||
return v
|
|
||||||
default:
|
|
||||||
return fmt.Sprintf("%v", v)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// CollectionConfig describes a collection
|
// CollectionConfig describes a collection
|
||||||
type CollectionConfig struct {
|
type CollectionConfig struct {
|
||||||
Name *string `yaml:"Name"`
|
Name *string `yaml:"Name"`
|
||||||
URL *string `yaml:"URL"`
|
URL *string `yaml:"URL"`
|
||||||
|
Directory *CollectionDirectoryConfig `yaml:"Directory"`
|
||||||
NavTemplate *struct {
|
NavTemplate *struct {
|
||||||
EntriesAttribute string `yaml:"EntriesAttribute"`
|
EntriesAttribute string `yaml:"EntriesAttribute"`
|
||||||
GoTo string `yaml:"GoTo"`
|
GoTo string `yaml:"GoTo"`
|
||||||
@ -72,7 +31,7 @@ type ThisPathConfig struct {
|
|||||||
Navname *string `yaml:"Navname"`
|
Navname *string `yaml:"Navname"`
|
||||||
GoTo *string `yaml:"GoTo"`
|
GoTo *string `yaml:"GoTo"`
|
||||||
Collections []*CollectionConfig `yaml:"Collections"`
|
Collections []*CollectionConfig `yaml:"Collections"`
|
||||||
Data MapString `yaml:"Data"`
|
Data helper.MapString `yaml:"Data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// IndexConfig describes index input and output file
|
// IndexConfig describes index input and output file
|
||||||
@ -132,41 +91,8 @@ type PathConfig struct {
|
|||||||
Markdown *MarkdownConfig `yaml:"Markdown"`
|
Markdown *MarkdownConfig `yaml:"Markdown"`
|
||||||
Imaging *ImagingConfig `yaml:"Imaging"`
|
Imaging *ImagingConfig `yaml:"Imaging"`
|
||||||
|
|
||||||
Data MapString `yaml:"Data"`
|
Data helper.MapString `yaml:"Data"`
|
||||||
}
|
|
||||||
|
// Collections here are recursive if saved as nav, so request should be filtered
|
||||||
// PathConfigTree is complete config tree of content dir
|
Collections []*CollectionConfig `yaml:"Collections"`
|
||||||
type PathConfigTree struct {
|
|
||||||
InputPath string
|
|
||||||
OutputPath string
|
|
||||||
Hidden bool // for collections which are not part of the navigation
|
|
||||||
|
|
||||||
ColMap MapString
|
|
||||||
|
|
||||||
InputFiles []string
|
|
||||||
OtherFiles []string
|
|
||||||
|
|
||||||
Config *PathConfig
|
|
||||||
Sub []*PathConfigTree
|
|
||||||
}
|
|
||||||
|
|
||||||
type ptrTransformer struct{}
|
|
||||||
|
|
||||||
func (t ptrTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
|
|
||||||
if typ.Kind() == reflect.Ptr {
|
|
||||||
return func(dst, src reflect.Value) error {
|
|
||||||
if dst.CanSet() {
|
|
||||||
if dst.IsNil() {
|
|
||||||
dst.Set(src)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge merges 2 objects or maps
|
|
||||||
func Merge(dst, src interface{}) error {
|
|
||||||
return mergo.Merge(dst, src, mergo.WithTransformers(ptrTransformer{}))
|
|
||||||
}
|
}
|
296
pkg/mark2web/content.go
Normal file
296
pkg/mark2web/content.go
Normal file
@ -0,0 +1,296 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"io/ioutil"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/progress"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
cpy "github.com/otiai10/copy"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ReadContentDir walks through content directory and builds the tree of configurations.
// inBase/outBase are the parent input and output directories, dir is
// the subdirectory to descend into ("" for the root call), and conf is
// the parent PathConfig that fillConfig merges with this directory's
// config.yml. Markdown files are collected into node.InputFiles, all
// other files (except config.yml) into node.OtherFiles, and each
// subdirectory becomes a child TreeNode that is read recursively.
func (node *TreeNode) ReadContentDir(inBase string, outBase string, dir string, conf *PathConfig) {
	progress.IncrTotal("content dir")
	progress.DescribeCurrent("content dir", "found "+inBase)

	if node.root == nil {
		// first node is root
		node.root = node
	}
	// resolves InputPath/OutputPath and merges this dir's config.yml
	node.fillConfig(inBase, outBase, dir, conf)

	files, err := ioutil.ReadDir(node.InputPath)
	logger.Eexit(err, "could not read dir '%s'", node.InputPath)

	// first only files
	for _, f := range files {
		p := node.InputPath + "/" + f.Name()
		if !f.IsDir() && f.Name() != "config.yml" {
			switch path.Ext(f.Name()) {
			case ".md":
				logger.D(".MD %s", p)
				if node.InputFiles == nil {
					node.InputFiles = make([]string, 0)
				}
				node.InputFiles = append(node.InputFiles, f.Name())
			default:
				logger.D("FIL %s", p)
				if node.OtherFiles == nil {
					node.OtherFiles = make([]string, 0)
				}
				node.OtherFiles = append(node.OtherFiles, f.Name())
			}
		}
	}

	// only directorys, needed config before
	for _, f := range files {
		p := node.InputPath + "/" + f.Name()
		if f.IsDir() {
			logger.D("DIR %s", p)
			newTree := new(TreeNode)
			newTree.root = node.root
			if node.Sub == nil {
				node.Sub = make([]*TreeNode, 0)
			}
			node.Sub = append(node.Sub, newTree)
			newTree.ReadContentDir(node.InputPath, node.OutputPath, f.Name(), node.Config)
		}
	}
}
|
||||||
|
|
||||||
|
// processMarkdownWithHeader renders a markdown document and prepares
// its template context. An optional leading "---"-fenced YAML header
// is parsed into a PathConfig and merged with the node's config (the
// header's This section wins); the remaining markdown is additionally
// split on standalone "---" lines into separately rendered BodyParts.
// errorRef names the source (file path or "InputString") for error
// messages. Returns the merged config and a context filled with
// navigation, body HTML and config data.
func (node *TreeNode) processMarkdownWithHeader(md []byte, errorRef string) (*PathConfig, *pongo2.Context) {

	newConfig := new(PathConfig)

	// header = everything between the leading --- fence pair
	headerRegex := regexp.MustCompile("(?s)^---(.*?)\\r?\\n\\r?---\\r?\\n\\r?")
	yamlData := headerRegex.Find(md)
	if string(yamlData) != "" {
		// replace tabs (YAML forbids tab indentation)
		yamlData = bytes.Replace(yamlData, []byte("\t"), []byte(" "), -1)

		logger.D("found yaml header in '%s', merging config", errorRef)
		err := yaml.Unmarshal(yamlData, newConfig)
		logger.Eexit(err, "could not parse YAML header from '%s'", errorRef)

		logger.D("merging config with upper config")
		// preserve the header's This over the inherited one
		oldThis := newConfig.This
		helper.Merge(newConfig, node.Config)
		newConfig.This = oldThis

		logger.D(spew.Sdump(newConfig))

		// strip the header before rendering the body
		md = headerRegex.ReplaceAll(md, []byte(""))
	} else {
		helper.Merge(newConfig, node.Config)
	}

	// use --- for splitting document in markdown parts
	regex := regexp.MustCompile("\\r?\\n\\r?---\\r?\\n\\r?")
	inputParts := regex.Split(string(md), -1)
	htmlParts := make([]*pongo2.Value, 0)

	// chroma syntax-highlighting options from the merged config
	chromaRenderer := false
	chromaStyle := "monokai"
	if m := newConfig.Markdown; m != nil {
		if m.ChromaRenderer != nil && *m.ChromaRenderer {
			chromaRenderer = true
		}
		if m.ChromaStyle != nil && *m.ChromaStyle != "" {
			chromaStyle = *m.ChromaStyle
		}
	}
	for _, iPart := range inputParts {
		htmlParts = append(htmlParts,
			pongo2.AsSafeValue(
				string(helper.RenderMarkdown([]byte(iPart), chromaRenderer, chromaStyle))))
	}

	// build navigation
	navMap := make(map[string]*NavElement)
	navSlice := make([]*NavElement, 0)
	navActive := make([]*NavElement, 0)
	node.buildNavigation(&navMap, &navSlice, &navActive)

	// read yaml header as data for template
	ctx := NewContext()
	ctx["This"] = newConfig.This
	ctx["Meta"] = newConfig.Meta
	ctx["Markdown"] = newConfig.Markdown
	ctx["Data"] = newConfig.Data
	ctx["ColMap"] = node.root.ColMap // root as NavMap and NavSlice, for sub go to NavElement.ColMap
	ctx["NavMap"] = navMap
	ctx["NavSlice"] = navSlice
	ctx["NavActive"] = navActive
	ctx["Body"] = pongo2.AsSafeValue(string(helper.RenderMarkdown(md, chromaRenderer, chromaStyle)))
	ctx["BodyParts"] = htmlParts
	ctx["CurrentPath"] = node.CurrentNavPath()
	// set active nav element
	if len(navActive) > 0 {
		ctx["NavElement"] = navActive[len(navActive)-1]
	} else {
		// if no active path to content, we are in root dir
		ctx["NavElement"] = &NavElement{
			GoTo:     node.BackToRootPath(),
			Active:   true,
			ColMap:   node.ColMap,
			Data:     node.Config.Data,
			This:     node.Config.This,
			SubMap:   &navMap,
			SubSlice: &navSlice,
		}
	}

	return newConfig, &ctx
}
|
||||||
|
|
||||||
|
// ProcessContent walks recursivly through the input paths and processes all files for output
|
||||||
|
func (node *TreeNode) ProcessContent() {
|
||||||
|
progress.DescribeCurrent("content dir", "processing "+node.InputPath)
|
||||||
|
|
||||||
|
helper.CreateDirectory(node.OutputPath)
|
||||||
|
|
||||||
|
if node.root != node {
|
||||||
|
// write htaccess for rewrites, root will be written in WriteWebserverConfig()
|
||||||
|
goTo := node.Config.This.GoTo
|
||||||
|
if goTo != nil && *goTo != "" {
|
||||||
|
goToFixed := *goTo
|
||||||
|
if strings.HasPrefix(goToFixed, "/") {
|
||||||
|
goToFixed = node.BackToRootPath() + goToFixed
|
||||||
|
}
|
||||||
|
goToFixed = path.Clean(goToFixed)
|
||||||
|
|
||||||
|
htaccessRedirect(node.OutputPath, goToFixed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, file := range node.InputFiles {
|
||||||
|
inFile := "InputString"
|
||||||
|
|
||||||
|
// ignore ???
|
||||||
|
ignoreFile := false
|
||||||
|
if file != "" {
|
||||||
|
inFile = node.InputPath + "/" + file
|
||||||
|
var ignoreRegex *string
|
||||||
|
if f := node.Config.Filename; f != nil {
|
||||||
|
ignoreRegex = f.Ignore
|
||||||
|
}
|
||||||
|
if ignoreRegex != nil && *ignoreRegex != "" {
|
||||||
|
regex, err := regexp.Compile(*ignoreRegex)
|
||||||
|
logger.Eexit(err, "could not compile filename.ignore regexp '%s' for file '%s'", *ignoreRegex, inFile)
|
||||||
|
ignoreFile = regex.MatchString(file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ignoreFile {
|
||||||
|
logger.I("ignoring file '%s', because of filename.ignore", inFile)
|
||||||
|
} else {
|
||||||
|
var input []byte
|
||||||
|
|
||||||
|
if file != "" {
|
||||||
|
logger.D("reading file: %s", inFile)
|
||||||
|
|
||||||
|
var err error
|
||||||
|
input, err = ioutil.ReadFile(inFile)
|
||||||
|
logger.Eexit(err, "could not read '%s'", inFile)
|
||||||
|
|
||||||
|
logger.I("processing input file '%s'", inFile)
|
||||||
|
} else {
|
||||||
|
// use input string if available and input filename == ""
|
||||||
|
var inputString *string
|
||||||
|
if i := node.Config.Index; i != nil {
|
||||||
|
inputString = i.InputString
|
||||||
|
}
|
||||||
|
if inputString != nil {
|
||||||
|
logger.D("using input string instead of file")
|
||||||
|
input = []byte(*inputString)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
newConfig, ctx := node.processMarkdownWithHeader(input, inFile)
|
||||||
|
|
||||||
|
// build output filename
|
||||||
|
outputFilename := file
|
||||||
|
|
||||||
|
var stripRegex *string
|
||||||
|
var outputExt *string
|
||||||
|
if f := newConfig.Filename; f != nil {
|
||||||
|
stripRegex = f.Strip
|
||||||
|
outputExt = f.OutputExtension
|
||||||
|
}
|
||||||
|
|
||||||
|
var indexInputFile *string
|
||||||
|
var indexOutputFile *string
|
||||||
|
if i := newConfig.Index; i != nil {
|
||||||
|
indexInputFile = i.InputFile
|
||||||
|
indexOutputFile = i.OutputFile
|
||||||
|
}
|
||||||
|
|
||||||
|
if indexInputFile != nil &&
|
||||||
|
*indexInputFile == file &&
|
||||||
|
indexOutputFile != nil &&
|
||||||
|
*indexOutputFile != "" {
|
||||||
|
outputFilename = *indexOutputFile
|
||||||
|
} else {
|
||||||
|
if stripRegex != nil && *stripRegex != "" {
|
||||||
|
regex, err := regexp.Compile(*stripRegex)
|
||||||
|
logger.Eexit(err, "could not compile filename.strip regexp '%s' for file '%s'", *stripRegex, inFile)
|
||||||
|
|
||||||
|
outputFilename = regex.ReplaceAllString(outputFilename, "$1")
|
||||||
|
}
|
||||||
|
if outputExt != nil && *outputExt != "" {
|
||||||
|
outputFilename += "." + *outputExt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
outFile := node.OutputPath + "/" + outputFilename
|
||||||
|
logger.D("using '%s' as output file", outFile)
|
||||||
|
logger.D("rendering template '%s' for '%s'", *newConfig.Template, outFile)
|
||||||
|
templateFilename := *newConfig.Template
|
||||||
|
result, err := renderTemplate(*newConfig.Template, node, newConfig, ctx)
|
||||||
|
logger.Eexit(err, "could not execute template '%s' for input file '%s': %s", templateFilename, inFile)
|
||||||
|
|
||||||
|
result = node.fixAssetsPath(result)
|
||||||
|
|
||||||
|
logger.N("writing to output file: %s", outFile)
|
||||||
|
err = ioutil.WriteFile(outFile, []byte(result), 0644)
|
||||||
|
logger.Eexit(err, "could not write to output file '%s'", outFile)
|
||||||
|
|
||||||
|
handleCompression(outFile, []byte(result))
|
||||||
|
|
||||||
|
//fmt.Println(string(html))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// process other files, copy...
|
||||||
|
for _, file := range node.OtherFiles {
|
||||||
|
switch Config.OtherFiles.Action {
|
||||||
|
case "copy":
|
||||||
|
from := node.InputPath + "/" + file
|
||||||
|
to := node.OutputPath + "/" + file
|
||||||
|
logger.N("copying file from '%s' to '%s'", from, to)
|
||||||
|
err := cpy.Copy(from, to)
|
||||||
|
logger.Eexit(err, "could not copy file from '%s' to '%s': %s", from, to)
|
||||||
|
|
||||||
|
handleCompression(to, nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
progress.IncrDone("content dir")
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
// sub can dynamically increase, so no for range
|
||||||
|
for i < len(node.Sub) {
|
||||||
|
node.Sub[i].ProcessContent()
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
168
pkg/mark2web/context.go
Normal file
168
pkg/mark2web/context.go
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/progress"
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
"github.com/extemporalgenome/slug"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CurrentContext is current pongo2 template context
|
||||||
|
var CurrentContext *pongo2.Context
|
||||||
|
|
||||||
|
// CurrentTreeNode is current node we are on while processing template
|
||||||
|
var CurrentTreeNode *TreeNode
|
||||||
|
|
||||||
|
// NewContext returns prefilled context with some functions and variables
|
||||||
|
func NewContext() pongo2.Context {
|
||||||
|
ctx := pongo2.Context{
|
||||||
|
"fnRequest": RequestFn,
|
||||||
|
"fnRender": RenderFn,
|
||||||
|
|
||||||
|
"AssetsPath": Config.Assets.ToPath,
|
||||||
|
"Timestamp": time.Now().Unix,
|
||||||
|
}
|
||||||
|
CurrentContext = &ctx
|
||||||
|
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
func (node *TreeNode) fillConfig(inBase, outBase, subDir string, conf *PathConfig) {
|
||||||
|
inPath := inBase
|
||||||
|
if subDir != "" {
|
||||||
|
inPath += "/" + subDir
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.I("reading input directory: %s", inPath)
|
||||||
|
|
||||||
|
node.InputPath = inPath
|
||||||
|
|
||||||
|
// read config
|
||||||
|
newConfig := new(PathConfig)
|
||||||
|
logger.D("looking for config.yml ...")
|
||||||
|
configFile := inPath + "/config.yml"
|
||||||
|
if _, err := os.Stat(configFile); os.IsNotExist(err) {
|
||||||
|
logger.D("no config.yml found in this directory, using upper configs")
|
||||||
|
helper.Merge(newConfig, conf)
|
||||||
|
// remove this
|
||||||
|
newConfig.This = ThisPathConfig{}
|
||||||
|
} else {
|
||||||
|
logger.D("reading config...")
|
||||||
|
data, err := ioutil.ReadFile(configFile)
|
||||||
|
logger.Eexit(err, "could not read file '%s'", configFile)
|
||||||
|
|
||||||
|
err = yaml.Unmarshal(data, newConfig)
|
||||||
|
logger.Eexit(err, "could not parse YAML file '%s'", configFile)
|
||||||
|
|
||||||
|
logger.D("merging config with upper config")
|
||||||
|
oldThis := newConfig.This
|
||||||
|
helper.Merge(newConfig, conf)
|
||||||
|
newConfig.This = oldThis
|
||||||
|
|
||||||
|
logger.D(spew.Sdump(newConfig))
|
||||||
|
}
|
||||||
|
|
||||||
|
node.Config = newConfig
|
||||||
|
|
||||||
|
// calc outDir
|
||||||
|
stripedDir := subDir
|
||||||
|
var regexStr *string
|
||||||
|
if newConfig.Path != nil {
|
||||||
|
regexStr = newConfig.Path.Strip
|
||||||
|
}
|
||||||
|
if regexStr != nil && *regexStr != "" {
|
||||||
|
if regex, err := regexp.Compile(*regexStr); err != nil {
|
||||||
|
logger.Eexit(err, "error compiling path.strip regex '%s' from '%s'", *regexStr, inBase+"/"+subDir)
|
||||||
|
} else {
|
||||||
|
stripedDir = regex.ReplaceAllString(stripedDir, "$1")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if node.Config.This.Navname == nil {
|
||||||
|
navname := strings.Replace(stripedDir, "_", " ", -1)
|
||||||
|
node.Config.This.Navname = &navname
|
||||||
|
}
|
||||||
|
|
||||||
|
stripedDir = slug.Slug(stripedDir)
|
||||||
|
outPath := outBase + "/" + stripedDir
|
||||||
|
outPath = path.Clean(outPath)
|
||||||
|
|
||||||
|
logger.I("calculated output directory: %s", outPath)
|
||||||
|
node.OutputPath = outPath
|
||||||
|
|
||||||
|
// handle collections
|
||||||
|
node.handleCollections()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (node *TreeNode) addSubNode(tplFilename, subDir string, navname string, ctx interface{}, dataMapKey string, body string, hideInNav bool) {
|
||||||
|
progress.IncrTotal("content dir")
|
||||||
|
progress.DescribeCurrent("content dir", "subdir "+node.InputPath+"/"+subDir)
|
||||||
|
|
||||||
|
newNode := new(TreeNode)
|
||||||
|
newNode.root = node.root
|
||||||
|
|
||||||
|
newPathConfig := new(PathConfig)
|
||||||
|
if dataMapKey != "" {
|
||||||
|
if newPathConfig.Data == nil {
|
||||||
|
newPathConfig.Data = make(helper.MapString)
|
||||||
|
}
|
||||||
|
// as submap in Data
|
||||||
|
newPathConfig.Data[dataMapKey] = ctx
|
||||||
|
} else if m, ok := ctx.(map[string]interface{}); ok {
|
||||||
|
// direct set data
|
||||||
|
newPathConfig.Data = m
|
||||||
|
}
|
||||||
|
|
||||||
|
mergedConfig := new(PathConfig)
|
||||||
|
err := helper.Merge(mergedConfig, node.Config)
|
||||||
|
logger.Eexit(err, "merge of path config failed")
|
||||||
|
|
||||||
|
// dont merge Data[DataKey]
|
||||||
|
if dataMapKey != "" {
|
||||||
|
mergedConfig.Data[dataMapKey] = nil
|
||||||
|
} else {
|
||||||
|
mergedConfig.Data = make(helper.MapString)
|
||||||
|
}
|
||||||
|
err = helper.Merge(mergedConfig, newPathConfig)
|
||||||
|
logger.Eexit(err, "merge of path config failed")
|
||||||
|
|
||||||
|
newNode.fillConfig(
|
||||||
|
node.InputPath,
|
||||||
|
node.OutputPath,
|
||||||
|
subDir,
|
||||||
|
mergedConfig,
|
||||||
|
)
|
||||||
|
if navname != "" {
|
||||||
|
// set after fillConfig, because it is ignored in fillConfig
|
||||||
|
newNode.Config.This.Navname = &navname
|
||||||
|
}
|
||||||
|
|
||||||
|
// fake via normal file behavior
|
||||||
|
newNode.Config.Template = &tplFilename
|
||||||
|
newNode.InputFiles = []string{""} // empty file is special for use InputString
|
||||||
|
indexInFile := ""
|
||||||
|
indexOutFile := "index.html"
|
||||||
|
if idx := newNode.Config.Index; idx != nil {
|
||||||
|
if idx.OutputFile != nil && *idx.OutputFile != "" {
|
||||||
|
indexOutFile = *idx.OutputFile
|
||||||
|
}
|
||||||
|
}
|
||||||
|
newNode.Config.Index = &IndexConfig{
|
||||||
|
InputFile: &indexInFile,
|
||||||
|
OutputFile: &indexOutFile,
|
||||||
|
InputString: &body,
|
||||||
|
}
|
||||||
|
newNode.Hidden = hideInNav
|
||||||
|
|
||||||
|
node.Sub = append(node.Sub, newNode)
|
||||||
|
}
|
31
pkg/mark2web/context_fn.go
Normal file
31
pkg/mark2web/context_fn.go
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/webrequest"
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RequestFn will make a web request and returns map[string]interface form pongo2
|
||||||
|
func RequestFn(url *pongo2.Value, args ...*pongo2.Value) *pongo2.Value {
|
||||||
|
u := url.String()
|
||||||
|
return pongo2.AsValue(webrequest.GetJSON(u))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderFn renders a pongo2 template with additional context
|
||||||
|
func RenderFn(templateFilename, subDir, ctx *pongo2.Value, param ...*pongo2.Value) *pongo2.Value {
|
||||||
|
dataMapKey := ""
|
||||||
|
body := ""
|
||||||
|
|
||||||
|
for i, p := range param {
|
||||||
|
switch i {
|
||||||
|
case 0:
|
||||||
|
dataMapKey = p.String()
|
||||||
|
case 1:
|
||||||
|
body = p.String()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
CurrentTreeNode.addSubNode(templateFilename.String(), subDir.String(), "", ctx.Interface(), dataMapKey, body, true)
|
||||||
|
|
||||||
|
return pongo2.AsValue(nil)
|
||||||
|
}
|
96
pkg/mark2web/htaccess.go
Normal file
96
pkg/mark2web/htaccess.go
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
func htaccessRedirect(outDir, goTo string) {
|
||||||
|
switch Config.Webserver.Type {
|
||||||
|
case "apache":
|
||||||
|
htaccessFile := outDir + "/.htaccess"
|
||||||
|
logger.N("writing '%s' with redirect to: %s", htaccessFile, goTo)
|
||||||
|
err := ioutil.WriteFile(htaccessFile, []byte(`RewriteEngine on
|
||||||
|
RewriteRule ^$ %{REQUEST_URI}`+goTo+`/ [R,L]
|
||||||
|
`), 0644)
|
||||||
|
logger.Eexit(err, "could not write '%s'", htaccessFile)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteWebserverConfig build the config for pre compression and more
|
||||||
|
func (tree *TreeNode) WriteWebserverConfig() {
|
||||||
|
goTo := ""
|
||||||
|
if g := tree.Config.This.GoTo; g != nil && *g != "" {
|
||||||
|
goTo = strings.TrimPrefix(*g, "/")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch Config.Webserver.Type {
|
||||||
|
case "apache":
|
||||||
|
configStr := `
|
||||||
|
RewriteEngine on
|
||||||
|
|
||||||
|
`
|
||||||
|
|
||||||
|
if goTo != "" {
|
||||||
|
configStr += `
|
||||||
|
RewriteRule ^$ %{REQUEST_URI}` + goTo + `/ [R,L]
|
||||||
|
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
configStr += `
|
||||||
|
AddCharset UTF-8 .html
|
||||||
|
AddCharset UTF-8 .json
|
||||||
|
AddCharset UTF-8 .js
|
||||||
|
AddCharset UTF-8 .css
|
||||||
|
|
||||||
|
RemoveLanguage .br
|
||||||
|
|
||||||
|
<IfModule mod_headers.c>
|
||||||
|
`
|
||||||
|
|
||||||
|
rewriteMacro := func(e, c, x, xx string) string {
|
||||||
|
return `
|
||||||
|
|
||||||
|
######` + e + `.` + x + `
|
||||||
|
|
||||||
|
RewriteCond "%{HTTP:Accept-encoding}" "` + xx + `"
|
||||||
|
RewriteCond "%{REQUEST_FILENAME}\.` + x + `" -s
|
||||||
|
RewriteRule "^(.*)` + e + `" "$1` + e + `\.` + x + `" [QSA]
|
||||||
|
|
||||||
|
RewriteRule "` + e + `\.` + x + `$" "-" [E=no-gzip:1,E=no-brotli]
|
||||||
|
|
||||||
|
<FilesMatch "` + e + `\.` + x + `$">
|
||||||
|
ForceType '` + c + `; charset=UTF-8'
|
||||||
|
Header append Content-Encoding ` + xx + `
|
||||||
|
Header append Vary Accept-Encoding
|
||||||
|
</FilesMatch>
|
||||||
|
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
for ext, contentType := range Config.Compress.Extensions {
|
||||||
|
rExt := regexp.QuoteMeta(ext)
|
||||||
|
if brotliSupported && Config.Compress.Brotli {
|
||||||
|
configStr += rewriteMacro(rExt, contentType, "br", "br")
|
||||||
|
}
|
||||||
|
if Config.Compress.GZIP {
|
||||||
|
configStr += rewriteMacro(rExt, contentType, "gz", "gzip")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
configStr += `
|
||||||
|
</IfModule>
|
||||||
|
`
|
||||||
|
|
||||||
|
if configStr != "" {
|
||||||
|
htaccessFile := Config.Directories.Output + "/.htaccess"
|
||||||
|
logger.N("writing webserver config to: %s", htaccessFile)
|
||||||
|
err := ioutil.WriteFile(htaccessFile, []byte(configStr), 0644)
|
||||||
|
logger.Eexit(err, "could not write '%s'", htaccessFile)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,11 +1,12 @@
|
|||||||
package helper
|
package mark2web
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"path"
|
"path"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitbase.de/apairon/mark2web/config"
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
)
|
)
|
||||||
|
|
||||||
// NavElement is one element with ist attributes and subs
|
// NavElement is one element with ist attributes and subs
|
||||||
@ -14,19 +15,23 @@ type NavElement struct {
|
|||||||
GoTo string
|
GoTo string
|
||||||
Active bool
|
Active bool
|
||||||
|
|
||||||
ColMap config.MapString
|
ColMap helper.MapString
|
||||||
|
|
||||||
Data interface{}
|
Data interface{}
|
||||||
|
|
||||||
This config.ThisPathConfig
|
This ThisPathConfig
|
||||||
|
|
||||||
SubMap *map[string]*NavElement
|
SubMap *map[string]*NavElement
|
||||||
SubSlice *[]*NavElement
|
SubSlice *[]*NavElement
|
||||||
}
|
}
|
||||||
|
|
||||||
// BuildNavigation builds the navigation trees for use in templates
|
// buildNavigation builds the navigation trees for use in templates
|
||||||
func BuildNavigation(conf *config.PathConfigTree, curNavMap *map[string]*NavElement, curNavSlice *[]*NavElement, navActive *[]*NavElement, activeNav string) {
|
func (node *TreeNode) buildNavigation(curNavMap *map[string]*NavElement, curNavSlice *[]*NavElement, navActive *[]*NavElement) {
|
||||||
for _, el := range conf.Sub {
|
buildNavigationRecursive(node.root, curNavMap, curNavSlice, navActive, node.CurrentNavPath(), node.BackToRootPath())
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildNavigationRecursive(tree *TreeNode, curNavMap *map[string]*NavElement, curNavSlice *[]*NavElement, navActive *[]*NavElement, activeNav string, backToRoot string) {
|
||||||
|
for _, el := range tree.Sub {
|
||||||
if el.Hidden {
|
if el.Hidden {
|
||||||
continue // ignore hidden nav points from collections
|
continue // ignore hidden nav points from collections
|
||||||
}
|
}
|
||||||
@ -37,21 +42,20 @@ func BuildNavigation(conf *config.PathConfigTree, curNavMap *map[string]*NavElem
|
|||||||
}
|
}
|
||||||
if ignNav != nil && *ignNav != "" {
|
if ignNav != nil && *ignNav != "" {
|
||||||
regex, err := regexp.Compile(*ignNav)
|
regex, err := regexp.Compile(*ignNav)
|
||||||
if err != nil {
|
logger.Eexit(err, "could not compile IngoreForNav regexp '%s' in '%s'", *ignNav, el.InputPath)
|
||||||
Log.Panicf("could not compile IngoreForNav regexp '%s' in '%s': %s", *ignNav, el.InputPath, err)
|
|
||||||
}
|
|
||||||
if regex.MatchString(path.Base(el.InputPath)) {
|
if regex.MatchString(path.Base(el.InputPath)) {
|
||||||
Log.Debugf("ignoring input directory '%s' in navigation", el.InputPath)
|
logger.D("ignoring input directory '%s' in navigation", el.InputPath)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
elPath := strings.TrimPrefix(el.OutputPath, config.Config.Directories.Output+"/")
|
elPath := strings.TrimPrefix(el.OutputPath, Config.Directories.Output+"/")
|
||||||
|
|
||||||
subMap := make(map[string]*NavElement)
|
subMap := make(map[string]*NavElement)
|
||||||
subSlice := make([]*NavElement, 0)
|
subSlice := make([]*NavElement, 0)
|
||||||
navEl := NavElement{
|
navEl := NavElement{
|
||||||
Active: strings.HasPrefix(activeNav, elPath),
|
Active: strings.HasPrefix(activeNav, elPath+"/") || activeNav == elPath,
|
||||||
Data: el.Config.Data,
|
Data: el.Config.Data,
|
||||||
ColMap: el.ColMap,
|
ColMap: el.ColMap,
|
||||||
SubMap: &subMap,
|
SubMap: &subMap,
|
||||||
@ -88,8 +92,7 @@ func BuildNavigation(conf *config.PathConfigTree, curNavMap *map[string]*NavElem
|
|||||||
|
|
||||||
if activeNav != "" && activeNav != "/" {
|
if activeNav != "" && activeNav != "/" {
|
||||||
// calculate relative path
|
// calculate relative path
|
||||||
bToRoot := BackToRoot(activeNav)
|
navEl.GoTo = backToRoot + navEl.GoTo
|
||||||
navEl.GoTo = bToRoot + navEl.GoTo
|
|
||||||
navEl.GoTo = path.Clean(navEl.GoTo)
|
navEl.GoTo = path.Clean(navEl.GoTo)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -98,6 +101,6 @@ func BuildNavigation(conf *config.PathConfigTree, curNavMap *map[string]*NavElem
|
|||||||
*curNavSlice = append(*curNavSlice, &navEl)
|
*curNavSlice = append(*curNavSlice, &navEl)
|
||||||
}
|
}
|
||||||
|
|
||||||
BuildNavigation(el, &subMap, &subSlice, navActive, activeNav)
|
buildNavigationRecursive(el, &subMap, &subSlice, navActive, activeNav, backToRoot)
|
||||||
}
|
}
|
||||||
}
|
}
|
60
pkg/mark2web/path.go
Normal file
60
pkg/mark2web/path.go
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ResolveNavPath fixes nav target relative to current navigation path
|
||||||
|
func (node *TreeNode) ResolveNavPath(target string) string {
|
||||||
|
if strings.HasPrefix(target, "/") {
|
||||||
|
target = node.BackToRootPath() + target
|
||||||
|
}
|
||||||
|
target = path.Clean(target)
|
||||||
|
return target
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveOutputPath fixes output directory relative to current navigation path
|
||||||
|
func (node *TreeNode) ResolveOutputPath(target string) string {
|
||||||
|
if strings.HasPrefix(target, "/") {
|
||||||
|
target = Config.Directories.Output + "/" + target
|
||||||
|
} else {
|
||||||
|
target = node.OutputPath + "/" + target
|
||||||
|
}
|
||||||
|
return path.Clean(target)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveInputPath fixes input directory relative to current navigation path
|
||||||
|
func (node *TreeNode) ResolveInputPath(target string) string {
|
||||||
|
if strings.HasPrefix(target, "/") {
|
||||||
|
target = Config.Directories.Input + "/" + target
|
||||||
|
} else {
|
||||||
|
target = node.InputPath + "/" + target
|
||||||
|
}
|
||||||
|
return path.Clean(target)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CurrentNavPath is current navigation path for this node
|
||||||
|
func (node *TreeNode) CurrentNavPath() string {
|
||||||
|
curNavPath := strings.TrimPrefix(node.OutputPath, Config.Directories.Output)
|
||||||
|
curNavPath = strings.TrimPrefix(curNavPath, "/")
|
||||||
|
curNavPath = path.Clean(curNavPath)
|
||||||
|
if curNavPath == "." {
|
||||||
|
curNavPath = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return curNavPath
|
||||||
|
}
|
||||||
|
|
||||||
|
// BackToRootPath builds ../../ string
|
||||||
|
func (node *TreeNode) BackToRootPath() string {
|
||||||
|
curNavPath := node.CurrentNavPath()
|
||||||
|
|
||||||
|
tmpPath := ""
|
||||||
|
if curNavPath != "" {
|
||||||
|
for i := strings.Count(curNavPath, "/") + 1; i > 0; i-- {
|
||||||
|
tmpPath += "../"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tmpPath
|
||||||
|
}
|
33
pkg/mark2web/render.go
Normal file
33
pkg/mark2web/render.go
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"github.com/flosch/pongo2/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
var templateCache = make(map[string]*pongo2.Template)
|
||||||
|
var templateDir string
|
||||||
|
|
||||||
|
// SetTemplateDir sets base directory for searching template files
|
||||||
|
func SetTemplateDir(dir string) {
|
||||||
|
templateDir = dir
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderTemplate renders a pongo2 template with context
|
||||||
|
func renderTemplate(filename string, node *TreeNode, pathConfig *PathConfig, ctx *pongo2.Context) (string, error) {
|
||||||
|
CurrentContext = ctx
|
||||||
|
CurrentTreeNode = node
|
||||||
|
templateFile := templateDir + "/" + filename
|
||||||
|
template := templateCache[templateFile]
|
||||||
|
if template == nil {
|
||||||
|
var err error
|
||||||
|
if template, err = pongo2.FromFile(templateFile); err != nil {
|
||||||
|
log.Panicf("could not parse template '%s': %s", templateFile, err)
|
||||||
|
} else {
|
||||||
|
templateCache[templateFile] = template
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return template.Execute(*ctx)
|
||||||
|
}
|
18
pkg/mark2web/run.go
Normal file
18
pkg/mark2web/run.go
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import "gitbase.de/apairon/mark2web/pkg/jobm"
|
||||||
|
|
||||||
|
// Run will do a complete run of mark2web
|
||||||
|
func Run(inDir, outDir string, defaultPathConfig *PathConfig) {
|
||||||
|
SetTemplateDir(inDir + "/templates")
|
||||||
|
|
||||||
|
tree := new(TreeNode)
|
||||||
|
tree.ReadContentDir(inDir+"/content", outDir, "", defaultPathConfig)
|
||||||
|
tree.ProcessContent()
|
||||||
|
|
||||||
|
ProcessAssets()
|
||||||
|
|
||||||
|
tree.WriteWebserverConfig()
|
||||||
|
|
||||||
|
jobm.Wait()
|
||||||
|
}
|
21
pkg/mark2web/tree.go
Normal file
21
pkg/mark2web/tree.go
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
package mark2web
|
||||||
|
|
||||||
|
import "gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
|
||||||
|
// TreeNode is complete config tree of content dir
|
||||||
|
type TreeNode struct {
|
||||||
|
InputPath string
|
||||||
|
OutputPath string
|
||||||
|
Hidden bool // for collections which are not part of the navigation
|
||||||
|
|
||||||
|
ColMap helper.MapString
|
||||||
|
|
||||||
|
InputFiles []string
|
||||||
|
OtherFiles []string
|
||||||
|
|
||||||
|
Config *PathConfig
|
||||||
|
Sub []*TreeNode
|
||||||
|
|
||||||
|
root *TreeNode // shows always to root of tree
|
||||||
|
parent *TreeNode
|
||||||
|
}
|
96
pkg/progress/bar.go
Normal file
96
pkg/progress/bar.go
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
package progress
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/helper"
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
"github.com/gosuri/uiprogress"
|
||||||
|
"github.com/mattn/go-tty"
|
||||||
|
)
|
||||||
|
|
||||||
|
type bar struct {
|
||||||
|
Bar *uiprogress.Bar
|
||||||
|
Description string
|
||||||
|
}
|
||||||
|
|
||||||
|
var bars = make(map[string]*bar)
|
||||||
|
var initialized = false
|
||||||
|
var terminalWidth = 80
|
||||||
|
|
||||||
|
// OverallTotal is number of total jobs
|
||||||
|
var OverallTotal = 0
|
||||||
|
|
||||||
|
// OverallDone is number of done jobs
|
||||||
|
var OverallDone = 0
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
updateLoggerPrefix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start initializes the bar drawing
|
||||||
|
func Start() {
|
||||||
|
if t, err := tty.Open(); err == nil && t != nil {
|
||||||
|
terminalWidth, _, _ = t.Size()
|
||||||
|
t.Close()
|
||||||
|
}
|
||||||
|
uiprogress.Start() // start rendering
|
||||||
|
initialized = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func updateLoggerPrefix() {
|
||||||
|
logger.Prefix = fmt.Sprintf("%3d/%3d: ", OverallDone, OverallTotal)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IncrTotal increases the total jobs for the bar
|
||||||
|
func IncrTotal(barname string) {
|
||||||
|
OverallTotal++
|
||||||
|
updateLoggerPrefix()
|
||||||
|
|
||||||
|
if initialized {
|
||||||
|
_bar := bars[barname]
|
||||||
|
if _bar == nil {
|
||||||
|
_bar = new(bar)
|
||||||
|
_bar.Bar = uiprogress.AddBar(1)
|
||||||
|
_bar.Bar.Width = 25
|
||||||
|
|
||||||
|
_bar.Bar.PrependFunc(func(b *uiprogress.Bar) string {
|
||||||
|
return fmt.Sprintf("%15s: %3d/%3d", helper.ShortenStringLeft(barname, 15), b.Current(), b.Total)
|
||||||
|
})
|
||||||
|
_bar.Bar.AppendFunc(func(b *uiprogress.Bar) string {
|
||||||
|
return fmt.Sprintf("%s", helper.ShortenStringLeft(_bar.Description, terminalWidth-80))
|
||||||
|
})
|
||||||
|
|
||||||
|
bars[barname] = _bar
|
||||||
|
} else {
|
||||||
|
_bar.Bar.Total++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IncrDone increases to done jobs counter
|
||||||
|
func IncrDone(barname string) {
|
||||||
|
OverallDone++
|
||||||
|
updateLoggerPrefix()
|
||||||
|
|
||||||
|
if initialized {
|
||||||
|
bars[barname].Bar.Incr()
|
||||||
|
bars[barname].Description = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DescribeCurrent describes the current job
|
||||||
|
func DescribeCurrent(barname, description string) {
|
||||||
|
if initialized {
|
||||||
|
bars[barname].Description = description
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop stops the bar drawing
|
||||||
|
func Stop() {
|
||||||
|
if initialized {
|
||||||
|
time.Sleep(time.Millisecond * 200)
|
||||||
|
uiprogress.Stop()
|
||||||
|
}
|
||||||
|
}
|
109
pkg/webrequest/request.go
Normal file
109
pkg/webrequest/request.go
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
package webrequest
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"image"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/progress"
|
||||||
|
|
||||||
|
"gitbase.de/apairon/mark2web/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
type wrImageEntry struct {
|
||||||
|
img image.Image
|
||||||
|
format string
|
||||||
|
}
|
||||||
|
|
||||||
|
type wrJSONEntry struct {
|
||||||
|
data interface{}
|
||||||
|
}
|
||||||
|
|
||||||
|
var wrImageCache = make(map[string]*wrImageEntry)
|
||||||
|
var wrJSONCache = make(map[string]*wrJSONEntry)
|
||||||
|
|
||||||
|
// Get will fetch an url and returns reponse
|
||||||
|
func Get(url string, opts interface{}) (resp *http.Response, err error) {
|
||||||
|
logger.N("requesting url via GET %s", url)
|
||||||
|
|
||||||
|
progress.IncrTotal("web request")
|
||||||
|
progress.DescribeCurrent("web request", url)
|
||||||
|
resp, err = http.Get(url)
|
||||||
|
progress.IncrDone("web request")
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetJSON will GET a json object/array from a given URL
|
||||||
|
func GetJSON(url string) interface{} {
|
||||||
|
cached := wrJSONCache[url]
|
||||||
|
if cached == nil {
|
||||||
|
resp, err := Get(url, nil)
|
||||||
|
logger.Eexit(err, "could not get url '%s'", url)
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
body, err := ioutil.ReadAll(resp.Body)
|
||||||
|
logger.Eexit(err, "could not read body from url '%s'", url)
|
||||||
|
|
||||||
|
logger.D("output from url '%s':\n%s", url, string(body))
|
||||||
|
|
||||||
|
if resp.StatusCode >= 400 {
|
||||||
|
logger.Exit("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
|
||||||
|
}
|
||||||
|
|
||||||
|
contentType := resp.Header.Get("Content-Type")
|
||||||
|
|
||||||
|
if strings.Contains(contentType, "json") {
|
||||||
|
|
||||||
|
} else {
|
||||||
|
logger.Exit("is not json '%s' from url '%s'", contentType, url)
|
||||||
|
}
|
||||||
|
|
||||||
|
cached = new(wrJSONEntry)
|
||||||
|
|
||||||
|
jsonMap := make(map[string]interface{})
|
||||||
|
err = json.Unmarshal(body, &jsonMap)
|
||||||
|
if err == nil {
|
||||||
|
cached.data = jsonMap
|
||||||
|
} else {
|
||||||
|
jsonArrayMap := make([]map[string]interface{}, 0)
|
||||||
|
err = json.Unmarshal(body, &jsonArrayMap)
|
||||||
|
if err == nil {
|
||||||
|
cached.data = jsonArrayMap
|
||||||
|
} else {
|
||||||
|
logger.Exit("could not read json from '%s': invalid type", url)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
wrJSONCache[url] = cached
|
||||||
|
|
||||||
|
}
|
||||||
|
return cached.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetImage gets an image from an url
|
||||||
|
func GetImage(url string) (image.Image, string, error) {
|
||||||
|
cached := wrImageCache[url]
|
||||||
|
if cached == nil {
|
||||||
|
resp, err := Get(url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", fmt.Errorf("could not get url '%s': %s", url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
img, format, err := image.Decode(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", fmt.Errorf("could read body from url '%s': %s", url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cached = &wrImageEntry{
|
||||||
|
img: img,
|
||||||
|
format: format,
|
||||||
|
}
|
||||||
|
|
||||||
|
wrImageCache[url] = cached
|
||||||
|
}
|
||||||
|
|
||||||
|
return cached.img, cached.format, nil
|
||||||
|
}
|
7
scripts/build.sh
Executable file
7
scripts/build.sh
Executable file
@ -0,0 +1,7 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
curdir=$(dirname $0)
|
||||||
|
distdir="$curdir/../dist"
|
||||||
|
|
||||||
|
mkdir -p "$distdir"
|
||||||
|
go build -v -ldflags "-X main.Version=`git describe --tags --long` -X main.GitHash=`git rev-parse HEAD` -X main.BuildTime=`date -u '+%Y-%m-%d_%I:%M:%S%p'`" -o "$distdir/mark2web-`cat $curdir/../build/VERSION`-${GOOS}-${GOARCH}${FILEEXT}" "$curdir/../cmd/mark2web/main.go"
|
BIN
test/in/img/test.jpg
Normal file
BIN
test/in/img/test.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 234 KiB |
@ -2,6 +2,7 @@ GET https://mark2web.basiscms.de/api/collections/get/mark2webBlog
|
|||||||
?sort[date]=-1
|
?sort[date]=-1
|
||||||
&limit=101
|
&limit=101
|
||||||
&token=985cee34099f4d3b08f18fc22f6296
|
&token=985cee34099f4d3b08f18fc22f6296
|
||||||
|
&filter[link][$exists]=0
|
||||||
|
|
||||||
###
|
###
|
||||||
|
|
1
tmp/build-errors.log
Normal file
1
tmp/build-errors.log
Normal file
@ -0,0 +1 @@
|
|||||||
|
exit status 1exit status 1exit status 1
|
1
vendor/github.com/Depado/bfchroma
generated
vendored
1
vendor/github.com/Depado/bfchroma
generated
vendored
@ -1 +0,0 @@
|
|||||||
Subproject commit ab17b511206de6d3592668ad718840a1e8f8283a
|
|
51
vendor/github.com/Depado/bfchroma/.drone.yml
generated
vendored
Normal file
51
vendor/github.com/Depado/bfchroma/.drone.yml
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
---
|
||||||
|
kind: pipeline
|
||||||
|
name: default
|
||||||
|
type: docker
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: test
|
||||||
|
image: golang:1.13.1
|
||||||
|
volumes:
|
||||||
|
- name: deps
|
||||||
|
path: /go
|
||||||
|
commands:
|
||||||
|
- go test -race -coverprofile=coverage.txt -covermode=atomic
|
||||||
|
|
||||||
|
- name: linter
|
||||||
|
image: golang:1.13.1
|
||||||
|
volumes:
|
||||||
|
- name: deps
|
||||||
|
path: /go
|
||||||
|
commands:
|
||||||
|
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s v1.20.0
|
||||||
|
- ./bin/golangci-lint run
|
||||||
|
|
||||||
|
- name: coverage
|
||||||
|
image: plugins/codecov
|
||||||
|
settings:
|
||||||
|
token:
|
||||||
|
from_secret: codecov_token
|
||||||
|
files:
|
||||||
|
- coverage.txt
|
||||||
|
|
||||||
|
- name: telegram
|
||||||
|
image: appleboy/drone-telegram
|
||||||
|
settings:
|
||||||
|
to: 790376882
|
||||||
|
format: markdown
|
||||||
|
token:
|
||||||
|
from_secret: telegram_token
|
||||||
|
message: >
|
||||||
|
*{{repo.name}}*
|
||||||
|
[Build {{build.number}}]({{build.link}}) by {{commit.author}} {{#success build.status}}succeeded{{else}}failed{{/success}} in {{buildtime build.started}}
|
||||||
|
`{{truncate commit.sha 8}}`: "{{commit.message}}"
|
||||||
|
when:
|
||||||
|
status:
|
||||||
|
- success
|
||||||
|
- failure
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
- name: deps
|
||||||
|
host:
|
||||||
|
path: /var/lib/cache/godeps/
|
17
vendor/github.com/Depado/bfchroma/.gitignore
generated
vendored
Normal file
17
vendor/github.com/Depado/bfchroma/.gitignore
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
# Binaries for programs and plugins
|
||||||
|
*.exe
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
|
||||||
|
# Test binary, build with `go test -c`
|
||||||
|
*.test
|
||||||
|
|
||||||
|
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||||
|
*.out
|
||||||
|
|
||||||
|
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
|
||||||
|
.glide/
|
||||||
|
vendor/
|
||||||
|
coverage.txt
|
||||||
|
|
16
vendor/github.com/Depado/bfchroma/.golangci.yml
generated
vendored
Normal file
16
vendor/github.com/Depado/bfchroma/.golangci.yml
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
run:
|
||||||
|
timeout: 5m
|
||||||
|
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- megacheck
|
||||||
|
- govet
|
||||||
|
disable:
|
||||||
|
- errcheck
|
||||||
|
- maligned
|
||||||
|
- prealloc
|
||||||
|
disable-all: false
|
||||||
|
presets:
|
||||||
|
- bugs
|
||||||
|
- unused
|
||||||
|
fast: false
|
21
vendor/github.com/Depado/bfchroma/LICENSE
generated
vendored
Normal file
21
vendor/github.com/Depado/bfchroma/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2017
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
209
vendor/github.com/Depado/bfchroma/README.md
generated
vendored
Normal file
209
vendor/github.com/Depado/bfchroma/README.md
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
|||||||
|
# bfchroma
|
||||||
|
|
||||||
|
[![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com)[![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com)
|
||||||
|
|
||||||
|
![Go Version](https://img.shields.io/badge/go-1.13-brightgreen.svg)
|
||||||
|
[![Go Report Card](https://goreportcard.com/badge/github.com/Depado/bfchroma)](https://goreportcard.com/report/github.com/Depado/bfchroma)
|
||||||
|
[![Build Status](https://drone.depado.eu/api/badges/Depado/bfchroma/status.svg)](https://drone.depado.eu/Depado/bfchroma)
|
||||||
|
[![codecov](https://codecov.io/gh/Depado/bfchroma/branch/master/graph/badge.svg)](https://codecov.io/gh/Depado/bfchroma)
|
||||||
|
[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/Depado/bfchroma/blob/master/LICENSE)
|
||||||
|
[![Godoc](https://godoc.org/github.com/Depado/bfchroma?status.svg)](https://godoc.org/github.com/Depado/bfchroma)
|
||||||
|
[![Sourcegraph](https://sourcegraph.com/github.com/Depado/bfchroma/-/badge.svg)](https://sourcegraph.com/github.com/Depado/bfchroma?badge)
|
||||||
|
[![Say Thanks!](https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg)](https://saythanks.io/to/Depado)
|
||||||
|
|
||||||
|
|
||||||
|
Integrating [Chroma](https://github.com/alecthomas/chroma) syntax highlighter as
|
||||||
|
a [Blackfriday](https://github.com/russross/blackfriday) renderer.
|
||||||
|
|
||||||
|
## Install and prerequisites
|
||||||
|
|
||||||
|
This project requires and uses the `v2` version of
|
||||||
|
[Blackfriday](https://github.com/russross/blackfriday/tree/v2).
|
||||||
|
|
||||||
|
```
|
||||||
|
$ go get -u github.com/Depado/bfchroma
|
||||||
|
```
|
||||||
|
|
||||||
|
_This project uses the module approach of go 1.11_
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
This renderer integrates chroma to highlight code with triple backtick notation.
|
||||||
|
It will try to use the given language when available otherwise it will try to
|
||||||
|
detect the language. If none of these two method works it will fallback to sane
|
||||||
|
defaults.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
bfchroma uses the functional options approach so you can customize the behavior
|
||||||
|
of the renderer. It uses sane defaults when no option is passed so you can use
|
||||||
|
the renderer simply by doing so :
|
||||||
|
|
||||||
|
```go
|
||||||
|
html := bf.Run([]byte(md), bf.WithRenderer(bfchroma.NewRenderer()))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
- `Style(s string)`
|
||||||
|
Define the style used by chroma for the rendering. The full list can be found [here](https://github.com/alecthomas/chroma/tree/master/styles)
|
||||||
|
- `ChromaStyle(*chroma.Style)`
|
||||||
|
This option can be used to passe directly a `*chroma.Style` instead of the
|
||||||
|
string representing the style as with the `Style(string)` option.
|
||||||
|
- `WithoutAutodetect()`
|
||||||
|
By default when no language information is written in the code block, this
|
||||||
|
renderer will try to auto-detect the used language. This option disables
|
||||||
|
this behavior and will fallback to a sane default when no language
|
||||||
|
information is available.
|
||||||
|
- `EmbedCSS()`
|
||||||
|
This option will embed CSS needed for chroma's `html.WithClasses()` at the beginning of blackfriday document.
|
||||||
|
CSS can also be extracted separately by calling `Renderer`'s.`ChromaCSS(w)` method, which will return styleshet for currently set style
|
||||||
|
- `Extend(bf.Renderer)`
|
||||||
|
This option allows to define the base blackfriday that will be extended.
|
||||||
|
- `ChromaOptions(...html.Option)`
|
||||||
|
This option allows you to pass Chroma's html options in the renderer. Such
|
||||||
|
options can be found [here](https://github.com/alecthomas/chroma#the-html-formatter).
|
||||||
|
|
||||||
|
### Option examples
|
||||||
|
|
||||||
|
Disabling language auto-detection and displaying line numbers
|
||||||
|
|
||||||
|
```go
|
||||||
|
r := bfchroma.NewRenderer(
|
||||||
|
bfchroma.WithoutAutodetect(),
|
||||||
|
bfchroma.ChromaOptions(html.WithLineNumbers()),
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Extend a blackfriday renderer
|
||||||
|
|
||||||
|
```go
|
||||||
|
b := bf.NewHTMLRenderer(bf.HTMLRendererParameters{
|
||||||
|
Flags: bf.CommonHTMLFlags,
|
||||||
|
})
|
||||||
|
|
||||||
|
r := bfchroma.NewRenderer(bfchroma.Extend(b))
|
||||||
|
```
|
||||||
|
|
||||||
|
Use a different style
|
||||||
|
|
||||||
|
```go
|
||||||
|
r := bfchroma.NewRenderer(bfchroma.Style("dracula"))
|
||||||
|
// Or
|
||||||
|
r = bfchroma.NewRenderer(bfchroma.ChromaStyle(styles.Dracula))
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/Depado/bfchroma"
|
||||||
|
|
||||||
|
bf "github.com/russross/blackfriday/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
var md = "This is some sample code.\n\n```go\n" +
|
||||||
|
`func main() {
|
||||||
|
fmt.Println("Hi")
|
||||||
|
}
|
||||||
|
` + "```"
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
html := bf.Run([]byte(md), bf.WithRenderer(bfchroma.NewRenderer()))
|
||||||
|
fmt.Println(string(html))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
Will output :
|
||||||
|
|
||||||
|
```html
|
||||||
|
<p>This is some sample code.</p>
|
||||||
|
<pre style="color:#f8f8f2;background-color:#272822"><span style="color:#66d9ef">func</span> <span style="color:#a6e22e">main</span>() {
|
||||||
|
<span style="color:#a6e22e">fmt</span>.<span style="color:#a6e22e">Println</span>(<span style="color:#e6db74">"Hi"</span>)
|
||||||
|
}
|
||||||
|
</pre>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Real-life example
|
||||||
|
|
||||||
|
In [smallblog](https://github.com/Depado/smallblog) I'm using bfchroma to render
|
||||||
|
my articles. It's using a combination of both bfchroma's options and blackfriday
|
||||||
|
extensions and flags.
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/Depado/bfchroma"
|
||||||
|
"github.com/alecthomas/chroma/formatters/html"
|
||||||
|
bf "github.com/russross/blackfriday/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Defines the extensions that are used
|
||||||
|
var exts = bf.NoIntraEmphasis | bf.Tables | bf.FencedCode | bf.Autolink |
|
||||||
|
bf.Strikethrough | bf.SpaceHeadings | bf.BackslashLineBreak |
|
||||||
|
bf.DefinitionLists | bf.Footnotes
|
||||||
|
|
||||||
|
// Defines the HTML rendering flags that are used
|
||||||
|
var flags = bf.UseXHTML | bf.Smartypants | bf.SmartypantsFractions |
|
||||||
|
bf.SmartypantsDashes | bf.SmartypantsLatexDashes | bf.TOC
|
||||||
|
|
||||||
|
// render will take a []byte input and will render it using a new renderer each
|
||||||
|
// time because reusing the same can mess with TOC and header IDs
|
||||||
|
func render(input []byte) []byte {
|
||||||
|
return bf.Run(
|
||||||
|
input,
|
||||||
|
bf.WithRenderer(
|
||||||
|
bfchroma.NewRenderer(
|
||||||
|
bfchroma.WithoutAutodetect(),
|
||||||
|
bfchroma.ChromaOptions(
|
||||||
|
html.WithLineNumbers(),
|
||||||
|
),
|
||||||
|
bfchroma.Extend(
|
||||||
|
bf.NewHTMLRenderer(bf.HTMLRendererParameters{
|
||||||
|
Flags: flags,
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
bf.WithExtensions(exts),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Classes
|
||||||
|
|
||||||
|
If you have loads of code in your markdown, you might want to consider using
|
||||||
|
`html.WithClasses()` in your `bfchroma.ChromaOptions()`. The CSS of the style
|
||||||
|
you chose can then be accessed like this :
|
||||||
|
|
||||||
|
```go
|
||||||
|
r := bfchroma.NewRenderer(
|
||||||
|
bfchroma.WithoutAutodetect(),
|
||||||
|
bfchroma.Extend(
|
||||||
|
bf.NewHTMLRenderer(bf.HTMLRendererParameters{Flags: flags}),
|
||||||
|
),
|
||||||
|
bfchroma.Style("monokai"),
|
||||||
|
bfchroma.ChromaOptions(html.WithClasses()),
|
||||||
|
)
|
||||||
|
|
||||||
|
var css template.CSS
|
||||||
|
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
if err := r.ChromaCSS(b); err != nil {
|
||||||
|
logrus.WithError(err).Warning("Couldn't write CSS")
|
||||||
|
}
|
||||||
|
css = template.CSS(b.String())
|
||||||
|
|
||||||
|
bf.Run(input, bf.WithRenderer(r), bf.WithExtensions(exts))
|
||||||
|
```
|
||||||
|
|
||||||
|
This way, you can pass your `css` var to any template and render it along the
|
||||||
|
rendered markdown.
|
11
vendor/github.com/Depado/bfchroma/go.mod
generated
vendored
Normal file
11
vendor/github.com/Depado/bfchroma/go.mod
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
module github.com/Depado/bfchroma
|
||||||
|
|
||||||
|
go 1.14
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/alecthomas/chroma v0.7.3
|
||||||
|
github.com/alecthomas/repr v0.0.0-20200325044227-4184120f674c // indirect
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
|
||||||
|
github.com/stretchr/testify v1.6.1
|
||||||
|
)
|
46
vendor/github.com/Depado/bfchroma/go.sum
generated
vendored
Normal file
46
vendor/github.com/Depado/bfchroma/go.sum
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||||
|
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||||
|
github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI=
|
||||||
|
github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
||||||
|
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||||
|
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||||
|
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
||||||
|
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||||
|
github.com/alecthomas/repr v0.0.0-20200325044227-4184120f674c h1:MVVbswUlqicyj8P/JljoocA7AyCo62gzD0O7jfvrhtE=
|
||||||
|
github.com/alecthomas/repr v0.0.0-20200325044227-4184120f674c/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||||
|
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
|
||||||
|
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
||||||
|
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
|
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||||
|
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||||
|
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||||
|
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||||
|
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||||
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
|
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
||||||
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
147
vendor/github.com/Depado/bfchroma/renderer.go
generated
vendored
Normal file
147
vendor/github.com/Depado/bfchroma/renderer.go
generated
vendored
Normal file
@ -0,0 +1,147 @@
|
|||||||
|
// Package bfchroma provides an easy and extensible blackfriday renderer that
|
||||||
|
// uses the chroma syntax highlighter to render code blocks.
|
||||||
|
package bfchroma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
|
||||||
|
"github.com/alecthomas/chroma"
|
||||||
|
"github.com/alecthomas/chroma/formatters/html"
|
||||||
|
"github.com/alecthomas/chroma/lexers"
|
||||||
|
"github.com/alecthomas/chroma/styles"
|
||||||
|
bf "github.com/russross/blackfriday/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Option defines the functional option type
|
||||||
|
type Option func(r *Renderer)
|
||||||
|
|
||||||
|
// Style is a function option allowing to set the style used by chroma
|
||||||
|
// Default : "monokai"
|
||||||
|
func Style(s string) Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.Style = styles.Get(s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChromaStyle is an option to directly set the style of the renderer using a
|
||||||
|
// chroma style instead of a string
|
||||||
|
func ChromaStyle(s *chroma.Style) Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.Style = s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithoutAutodetect disables chroma's language detection when no codeblock
|
||||||
|
// extra information is given. It will fallback to a sane default instead of
|
||||||
|
// trying to detect the language.
|
||||||
|
func WithoutAutodetect() Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.Autodetect = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// EmbedCSS will embed CSS needed for html.WithClasses() in beginning of the document
|
||||||
|
func EmbedCSS() Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.embedCSS = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChromaOptions allows to pass Chroma html.Option such as Standalone()
|
||||||
|
// WithClasses(), ClassPrefix(prefix)...
|
||||||
|
func ChromaOptions(options ...html.Option) Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.ChromaOptions = options
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extend allows to specify the blackfriday renderer which is extended
|
||||||
|
func Extend(br bf.Renderer) Option {
|
||||||
|
return func(r *Renderer) {
|
||||||
|
r.Base = br
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRenderer will return a new bfchroma renderer with sane defaults
|
||||||
|
func NewRenderer(options ...Option) *Renderer {
|
||||||
|
r := &Renderer{
|
||||||
|
Base: bf.NewHTMLRenderer(bf.HTMLRendererParameters{
|
||||||
|
Flags: bf.CommonHTMLFlags,
|
||||||
|
}),
|
||||||
|
Style: styles.Monokai,
|
||||||
|
Autodetect: true,
|
||||||
|
}
|
||||||
|
for _, option := range options {
|
||||||
|
option(r)
|
||||||
|
}
|
||||||
|
r.Formatter = html.New(r.ChromaOptions...)
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderWithChroma will render the given text to the w io.Writer
|
||||||
|
func (r *Renderer) RenderWithChroma(w io.Writer, text []byte, data bf.CodeBlockData) error {
|
||||||
|
var lexer chroma.Lexer
|
||||||
|
|
||||||
|
// Determining the lexer to use
|
||||||
|
if len(data.Info) > 0 {
|
||||||
|
lexer = lexers.Get(string(data.Info))
|
||||||
|
} else if r.Autodetect {
|
||||||
|
lexer = lexers.Analyse(string(text))
|
||||||
|
}
|
||||||
|
if lexer == nil {
|
||||||
|
lexer = lexers.Fallback
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tokenize the code
|
||||||
|
iterator, err := lexer.Tokenise(nil, string(text))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return r.Formatter.Format(w, r.Style, iterator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Renderer is a custom Blackfriday renderer that uses the capabilities of
|
||||||
|
// chroma to highlight code with triple backtick notation
|
||||||
|
type Renderer struct {
|
||||||
|
Base bf.Renderer
|
||||||
|
Autodetect bool
|
||||||
|
ChromaOptions []html.Option
|
||||||
|
Style *chroma.Style
|
||||||
|
Formatter *html.Formatter
|
||||||
|
embedCSS bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderNode satisfies the Renderer interface
|
||||||
|
func (r *Renderer) RenderNode(w io.Writer, node *bf.Node, entering bool) bf.WalkStatus {
|
||||||
|
switch node.Type {
|
||||||
|
case bf.Document:
|
||||||
|
if entering && r.embedCSS {
|
||||||
|
w.Write([]byte("<style>"))
|
||||||
|
r.Formatter.WriteCSS(w, r.Style)
|
||||||
|
w.Write([]byte("</style>"))
|
||||||
|
}
|
||||||
|
return r.Base.RenderNode(w, node, entering)
|
||||||
|
case bf.CodeBlock:
|
||||||
|
if err := r.RenderWithChroma(w, node.Literal, node.CodeBlockData); err != nil {
|
||||||
|
return r.Base.RenderNode(w, node, entering)
|
||||||
|
}
|
||||||
|
return bf.SkipChildren
|
||||||
|
default:
|
||||||
|
return r.Base.RenderNode(w, node, entering)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderHeader satisfies the Renderer interface
|
||||||
|
func (r *Renderer) RenderHeader(w io.Writer, ast *bf.Node) {
|
||||||
|
r.Base.RenderHeader(w, ast)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderFooter satisfies the Renderer interface
|
||||||
|
func (r *Renderer) RenderFooter(w io.Writer, ast *bf.Node) {
|
||||||
|
r.Base.RenderFooter(w, ast)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChromaCSS returns CSS used with chroma's html.WithClasses() option
|
||||||
|
func (r *Renderer) ChromaCSS(w io.Writer) error {
|
||||||
|
return r.Formatter.WriteCSS(w, r.Style)
|
||||||
|
}
|
1
vendor/github.com/alecthomas/chroma
generated
vendored
1
vendor/github.com/alecthomas/chroma
generated
vendored
@ -1 +0,0 @@
|
|||||||
Subproject commit b5ccb8dc322b914484924caf4463d601a64382f7
|
|
19
vendor/github.com/alecthomas/chroma/.gitignore
generated
vendored
Normal file
19
vendor/github.com/alecthomas/chroma/.gitignore
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
# Binaries for programs and plugins
|
||||||
|
*.exe
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
/cmd/chroma/chroma
|
||||||
|
|
||||||
|
# Test binary, build with `go test -c`
|
||||||
|
*.test
|
||||||
|
|
||||||
|
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||||
|
*.out
|
||||||
|
|
||||||
|
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
|
||||||
|
.glide/
|
||||||
|
|
||||||
|
_models/
|
||||||
|
|
||||||
|
_examples/
|
76
vendor/github.com/alecthomas/chroma/.golangci.yml
generated
vendored
Normal file
76
vendor/github.com/alecthomas/chroma/.golangci.yml
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
run:
|
||||||
|
tests: true
|
||||||
|
skip-dirs:
|
||||||
|
- _examples
|
||||||
|
|
||||||
|
output:
|
||||||
|
print-issued-lines: false
|
||||||
|
|
||||||
|
linters:
|
||||||
|
enable-all: true
|
||||||
|
disable:
|
||||||
|
- maligned
|
||||||
|
- megacheck
|
||||||
|
- lll
|
||||||
|
- gocyclo
|
||||||
|
- dupl
|
||||||
|
- gochecknoglobals
|
||||||
|
- funlen
|
||||||
|
- godox
|
||||||
|
- wsl
|
||||||
|
- gomnd
|
||||||
|
- gocognit
|
||||||
|
- goerr113
|
||||||
|
- nolintlint
|
||||||
|
- testpackage
|
||||||
|
- godot
|
||||||
|
- nestif
|
||||||
|
- paralleltest
|
||||||
|
- nlreturn
|
||||||
|
- cyclop
|
||||||
|
- exhaustivestruct
|
||||||
|
- gci
|
||||||
|
- gofumpt
|
||||||
|
- errorlint
|
||||||
|
- exhaustive
|
||||||
|
- ifshort
|
||||||
|
- wrapcheck
|
||||||
|
- stylecheck
|
||||||
|
|
||||||
|
linters-settings:
|
||||||
|
govet:
|
||||||
|
check-shadowing: true
|
||||||
|
gocyclo:
|
||||||
|
min-complexity: 10
|
||||||
|
dupl:
|
||||||
|
threshold: 100
|
||||||
|
goconst:
|
||||||
|
min-len: 8
|
||||||
|
min-occurrences: 3
|
||||||
|
forbidigo:
|
||||||
|
forbid:
|
||||||
|
- (Must)?NewLexer
|
||||||
|
exclude_godoc_examples: false
|
||||||
|
|
||||||
|
|
||||||
|
issues:
|
||||||
|
max-per-linter: 0
|
||||||
|
max-same: 0
|
||||||
|
exclude-use-default: false
|
||||||
|
exclude:
|
||||||
|
# Captured by errcheck.
|
||||||
|
- '^(G104|G204):'
|
||||||
|
# Very commonly not checked.
|
||||||
|
- 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
|
||||||
|
- 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported'
|
||||||
|
- 'composite literal uses unkeyed fields'
|
||||||
|
- 'declaration of "err" shadows declaration'
|
||||||
|
- 'should not use dot imports'
|
||||||
|
- 'Potential file inclusion via variable'
|
||||||
|
- 'should have comment or be unexported'
|
||||||
|
- 'comment on exported var .* should be of the form'
|
||||||
|
- 'at least one file in a package should have a package comment'
|
||||||
|
- 'string literal contains the Unicode'
|
||||||
|
- 'methods on the same type should have the same receiver name'
|
||||||
|
- '_TokenType_name should be _TokenTypeName'
|
||||||
|
- '`_TokenType_map` should be `_TokenTypeMap`'
|
37
vendor/github.com/alecthomas/chroma/.goreleaser.yml
generated
vendored
Normal file
37
vendor/github.com/alecthomas/chroma/.goreleaser.yml
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
project_name: chroma
|
||||||
|
release:
|
||||||
|
github:
|
||||||
|
owner: alecthomas
|
||||||
|
name: chroma
|
||||||
|
brews:
|
||||||
|
-
|
||||||
|
install: bin.install "chroma"
|
||||||
|
env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
builds:
|
||||||
|
- goos:
|
||||||
|
- linux
|
||||||
|
- darwin
|
||||||
|
- windows
|
||||||
|
goarch:
|
||||||
|
- arm64
|
||||||
|
- amd64
|
||||||
|
- "386"
|
||||||
|
goarm:
|
||||||
|
- "6"
|
||||||
|
dir: ./cmd/chroma
|
||||||
|
main: .
|
||||||
|
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
|
||||||
|
binary: chroma
|
||||||
|
archives:
|
||||||
|
-
|
||||||
|
format: tar.gz
|
||||||
|
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
|
||||||
|
.Arm }}{{ end }}'
|
||||||
|
files:
|
||||||
|
- COPYING
|
||||||
|
- README*
|
||||||
|
snapshot:
|
||||||
|
name_template: SNAPSHOT-{{ .Commit }}
|
||||||
|
checksum:
|
||||||
|
name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt'
|
19
vendor/github.com/alecthomas/chroma/COPYING
generated
vendored
Normal file
19
vendor/github.com/alecthomas/chroma/COPYING
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright (C) 2017 Alec Thomas
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
19
vendor/github.com/alecthomas/chroma/Makefile
generated
vendored
Normal file
19
vendor/github.com/alecthomas/chroma/Makefile
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
.PHONY: chromad upload all
|
||||||
|
|
||||||
|
VERSION ?= $(shell git describe --tags --dirty --always)
|
||||||
|
|
||||||
|
all: README.md tokentype_string.go
|
||||||
|
|
||||||
|
README.md: lexers/*/*.go
|
||||||
|
./table.py
|
||||||
|
|
||||||
|
tokentype_string.go: types.go
|
||||||
|
go generate
|
||||||
|
|
||||||
|
chromad:
|
||||||
|
rm -f chromad
|
||||||
|
(export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
|
||||||
|
|
||||||
|
upload: chromad
|
||||||
|
scp chromad root@swapoff.org: && \
|
||||||
|
ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'
|
285
vendor/github.com/alecthomas/chroma/README.md
generated
vendored
Normal file
285
vendor/github.com/alecthomas/chroma/README.md
generated
vendored
Normal file
@ -0,0 +1,285 @@
|
|||||||
|
# Chroma — A general purpose syntax highlighter in pure Go
|
||||||
|
[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
|
||||||
|
|
||||||
|
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
|
||||||
|
|
||||||
|
Chroma takes source code and other structured text and converts it into syntax
|
||||||
|
highlighted HTML, ANSI-coloured text, etc.
|
||||||
|
|
||||||
|
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
|
||||||
|
translators for Pygments lexers and styles.
|
||||||
|
|
||||||
|
<a id="markdown-table-of-contents" name="table-of-contents"></a>
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
<!-- TOC -->
|
||||||
|
|
||||||
|
1. [Table of Contents](#table-of-contents)
|
||||||
|
2. [Supported languages](#supported-languages)
|
||||||
|
3. [Try it](#try-it)
|
||||||
|
4. [Using the library](#using-the-library)
|
||||||
|
1. [Quick start](#quick-start)
|
||||||
|
2. [Identifying the language](#identifying-the-language)
|
||||||
|
3. [Formatting the output](#formatting-the-output)
|
||||||
|
4. [The HTML formatter](#the-html-formatter)
|
||||||
|
5. [More detail](#more-detail)
|
||||||
|
1. [Lexers](#lexers)
|
||||||
|
2. [Formatters](#formatters)
|
||||||
|
3. [Styles](#styles)
|
||||||
|
6. [Command-line interface](#command-line-interface)
|
||||||
|
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
|
||||||
|
|
||||||
|
<!-- /TOC -->
|
||||||
|
|
||||||
|
<a id="markdown-supported-languages" name="supported-languages"></a>
|
||||||
|
## Supported languages
|
||||||
|
|
||||||
|
Prefix | Language
|
||||||
|
:----: | --------
|
||||||
|
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
|
||||||
|
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck
|
||||||
|
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
|
||||||
|
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
|
||||||
|
E | EBNF, Elixir, Elm, EmacsLisp, Erlang
|
||||||
|
F | Factor, Fish, Forth, Fortran, FSharp
|
||||||
|
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy
|
||||||
|
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
|
||||||
|
I | Idris, Igor, INI, Io
|
||||||
|
J | J, Java, JavaScript, JSON, Julia, Jungle
|
||||||
|
K | Kotlin
|
||||||
|
L | Lighttpd configuration file, LLVM, Lua
|
||||||
|
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
|
||||||
|
N | NASM, Newspeak, Nginx configuration file, Nim, Nix
|
||||||
|
O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode
|
||||||
|
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python
|
||||||
|
Q | QBasic
|
||||||
|
R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
|
||||||
|
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog
|
||||||
|
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
|
||||||
|
V | VB.net, verilog, VHDL, VimL, vue
|
||||||
|
W | WDTE
|
||||||
|
X | XML, Xorg
|
||||||
|
Y | YAML, YANG
|
||||||
|
Z | Zig
|
||||||
|
|
||||||
|
|
||||||
|
_I will attempt to keep this section up to date, but an authoritative list can be
|
||||||
|
displayed with `chroma --list`._
|
||||||
|
|
||||||
|
<a id="markdown-try-it" name="try-it"></a>
|
||||||
|
## Try it
|
||||||
|
|
||||||
|
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
|
||||||
|
|
||||||
|
<a id="markdown-using-the-library" name="using-the-library"></a>
|
||||||
|
## Using the library
|
||||||
|
|
||||||
|
Chroma, like Pygments, has the concepts of
|
||||||
|
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
|
||||||
|
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
|
||||||
|
[styles](https://github.com/alecthomas/chroma/tree/master/styles).
|
||||||
|
|
||||||
|
Lexers convert source text into a stream of tokens, styles specify how token
|
||||||
|
types are mapped to colours, and formatters convert tokens and styles into
|
||||||
|
formatted output.
|
||||||
|
|
||||||
|
A package exists for each of these, containing a global `Registry` variable
|
||||||
|
with all of the registered implementations. There are also helper functions
|
||||||
|
for using the registry in each package, such as looking up lexers by name or
|
||||||
|
matching filenames, etc.
|
||||||
|
|
||||||
|
In all cases, if a lexer, formatter or style can not be determined, `nil` will
|
||||||
|
be returned. In this situation you may want to default to the `Fallback`
|
||||||
|
value in each respective package, which provides sane defaults.
|
||||||
|
|
||||||
|
<a id="markdown-quick-start" name="quick-start"></a>
|
||||||
|
### Quick start
|
||||||
|
|
||||||
|
A convenience function exists that can be used to simply format some source
|
||||||
|
text, without any effort:
|
||||||
|
|
||||||
|
```go
|
||||||
|
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
|
||||||
|
```
|
||||||
|
|
||||||
|
<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
|
||||||
|
### Identifying the language
|
||||||
|
|
||||||
|
To highlight code, you'll first have to identify what language the code is
|
||||||
|
written in. There are three primary ways to do that:
|
||||||
|
|
||||||
|
1. Detect the language from its filename.
|
||||||
|
|
||||||
|
```go
|
||||||
|
lexer := lexers.Match("foo.go")
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`).
|
||||||
|
|
||||||
|
```go
|
||||||
|
lexer := lexers.Get("go")
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Detect the language from its content.
|
||||||
|
|
||||||
|
```go
|
||||||
|
lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n")
|
||||||
|
```
|
||||||
|
|
||||||
|
In all cases, `nil` will be returned if the language can not be identified.
|
||||||
|
|
||||||
|
```go
|
||||||
|
if lexer == nil {
|
||||||
|
lexer = lexers.Fallback
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
At this point, it should be noted that some lexers can be extremely chatty. To
|
||||||
|
mitigate this, you can use the coalescing lexer to coalesce runs of identical
|
||||||
|
token types into a single token:
|
||||||
|
|
||||||
|
```go
|
||||||
|
lexer = chroma.Coalesce(lexer)
|
||||||
|
```
|
||||||
|
|
||||||
|
<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
|
||||||
|
### Formatting the output
|
||||||
|
|
||||||
|
Once a language is identified you will need to pick a formatter and a style (theme).
|
||||||
|
|
||||||
|
```go
|
||||||
|
style := styles.Get("swapoff")
|
||||||
|
if style == nil {
|
||||||
|
style = styles.Fallback
|
||||||
|
}
|
||||||
|
formatter := formatters.Get("html")
|
||||||
|
if formatter == nil {
|
||||||
|
formatter = formatters.Fallback
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Then obtain an iterator over the tokens:
|
||||||
|
|
||||||
|
```go
|
||||||
|
contents, err := ioutil.ReadAll(r)
|
||||||
|
iterator, err := lexer.Tokenise(nil, string(contents))
|
||||||
|
```
|
||||||
|
|
||||||
|
And finally, format the tokens from the iterator:
|
||||||
|
|
||||||
|
```go
|
||||||
|
err := formatter.Format(w, style, iterator)
|
||||||
|
```
|
||||||
|
|
||||||
|
<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
|
||||||
|
### The HTML formatter
|
||||||
|
|
||||||
|
By default the `html` registered formatter generates standalone HTML with
|
||||||
|
embedded CSS. More flexibility is available through the `formatters/html` package.
|
||||||
|
|
||||||
|
Firstly, the output generated by the formatter can be customised with the
|
||||||
|
following constructor options:
|
||||||
|
|
||||||
|
- `Standalone()` - generate standalone HTML with embedded CSS.
|
||||||
|
- `WithClasses()` - use classes rather than inlined style attributes.
|
||||||
|
- `ClassPrefix(prefix)` - prefix each generated CSS class.
|
||||||
|
- `TabWidth(width)` - Set the rendered tab width, in characters.
|
||||||
|
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
|
||||||
|
- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
|
||||||
|
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
|
||||||
|
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
|
||||||
|
|
||||||
|
If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with:
|
||||||
|
|
||||||
|
```go
|
||||||
|
formatter := html.New(html.WithClasses())
|
||||||
|
err := formatter.WriteCSS(w, style)
|
||||||
|
```
|
||||||
|
|
||||||
|
<a id="markdown-more-detail" name="more-detail"></a>
|
||||||
|
## More detail
|
||||||
|
|
||||||
|
<a id="markdown-lexers" name="lexers"></a>
|
||||||
|
### Lexers
|
||||||
|
|
||||||
|
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
|
||||||
|
for details on implementing lexers. Most concepts apply directly to Chroma,
|
||||||
|
but see existing lexer implementations for real examples.
|
||||||
|
|
||||||
|
In many cases lexers can be automatically converted directly from Pygments by
|
||||||
|
using the included Python 3 script `pygments2chroma.py`. I use something like
|
||||||
|
the following:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
python3 _tools/pygments2chroma.py \
|
||||||
|
pygments.lexers.jvm.KotlinLexer \
|
||||||
|
> lexers/k/kotlin.go \
|
||||||
|
&& gofmt -s -w lexers/k/kotlin.go
|
||||||
|
```
|
||||||
|
|
||||||
|
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
|
||||||
|
for a list of lexers, and notes on some of the issues importing them.
|
||||||
|
|
||||||
|
<a id="markdown-formatters" name="formatters"></a>
|
||||||
|
### Formatters
|
||||||
|
|
||||||
|
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
|
||||||
|
|
||||||
|
A `noop` formatter is included that outputs the token text only, and a `tokens`
|
||||||
|
formatter outputs raw tokens. The latter is useful for debugging lexers.
|
||||||
|
|
||||||
|
<a id="markdown-styles" name="styles"></a>
|
||||||
|
### Styles
|
||||||
|
|
||||||
|
Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments.
|
||||||
|
|
||||||
|
All Pygments styles have been converted to Chroma using the `_tools/style.py` script.
|
||||||
|
|
||||||
|
When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color.
|
||||||
|
|
||||||
|
For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background:
|
||||||
|
|
||||||
|
~~~go
|
||||||
|
chroma.Background: "#f8f8f2 bg:#000000",
|
||||||
|
~~~
|
||||||
|
|
||||||
|
Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color.
|
||||||
|
|
||||||
|
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
|
||||||
|
|
||||||
|
<a id="markdown-command-line-interface" name="command-line-interface"></a>
|
||||||
|
## Command-line interface
|
||||||
|
|
||||||
|
A command-line interface to Chroma is included.
|
||||||
|
|
||||||
|
Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases).
|
||||||
|
|
||||||
|
The CLI can be used as a preprocessor to colorise output of `less(1)`,
|
||||||
|
see documentation for the `LESSOPEN` environment variable.
|
||||||
|
|
||||||
|
The `--fail` flag can be used to suppress output and return with exit status
|
||||||
|
1 to facilitate falling back to some other preprocessor in case chroma
|
||||||
|
does not resolve a specific lexer to use for the given file. For example:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"'
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `cat` with your favourite fallback preprocessor.
|
||||||
|
|
||||||
|
When invoked as `.lessfilter`, the `--fail` flag is automatically turned
|
||||||
|
on under the hood for easy integration with [lesspipe shipping with
|
||||||
|
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
|
||||||
|
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
|
||||||
|
|
||||||
|
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
|
||||||
|
## What's missing compared to Pygments?
|
||||||
|
|
||||||
|
- Quite a few lexers, for various reasons (pull-requests welcome):
|
||||||
|
- Pygments lexers for complex languages often include custom code to
|
||||||
|
handle certain aspects, such as Raku's ability to nest code inside
|
||||||
|
regular expressions. These require time and effort to convert.
|
||||||
|
- I mostly only converted languages I had heard of, to reduce the porting cost.
|
||||||
|
- Some more esoteric features of Pygments are omitted for simplicity.
|
||||||
|
- Though the Chroma API supports content detection, very few languages support them.
|
||||||
|
I have plans to implement a statistical analyser at some point, but not enough time.
|
35
vendor/github.com/alecthomas/chroma/coalesce.go
generated
vendored
Normal file
35
vendor/github.com/alecthomas/chroma/coalesce.go
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
|
||||||
|
func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }
|
||||||
|
|
||||||
|
type coalescer struct{ Lexer }
|
||||||
|
|
||||||
|
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
|
||||||
|
var prev Token
|
||||||
|
it, err := d.Lexer.Tokenise(options, text)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return func() Token {
|
||||||
|
for token := it(); token != (EOF); token = it() {
|
||||||
|
if len(token.Value) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if prev == EOF {
|
||||||
|
prev = token
|
||||||
|
} else {
|
||||||
|
if prev.Type == token.Type && len(prev.Value) < 8192 {
|
||||||
|
prev.Value += token.Value
|
||||||
|
} else {
|
||||||
|
out := prev
|
||||||
|
prev = token
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out := prev
|
||||||
|
prev = EOF
|
||||||
|
return out
|
||||||
|
}, nil
|
||||||
|
}
|
164
vendor/github.com/alecthomas/chroma/colour.go
generated
vendored
Normal file
164
vendor/github.com/alecthomas/chroma/colour.go
generated
vendored
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
|
||||||
|
var ANSI2RGB = map[string]string{
|
||||||
|
"#ansiblack": "000000",
|
||||||
|
"#ansidarkred": "7f0000",
|
||||||
|
"#ansidarkgreen": "007f00",
|
||||||
|
"#ansibrown": "7f7fe0",
|
||||||
|
"#ansidarkblue": "00007f",
|
||||||
|
"#ansipurple": "7f007f",
|
||||||
|
"#ansiteal": "007f7f",
|
||||||
|
"#ansilightgray": "e5e5e5",
|
||||||
|
// Normal
|
||||||
|
"#ansidarkgray": "555555",
|
||||||
|
"#ansired": "ff0000",
|
||||||
|
"#ansigreen": "00ff00",
|
||||||
|
"#ansiyellow": "ffff00",
|
||||||
|
"#ansiblue": "0000ff",
|
||||||
|
"#ansifuchsia": "ff00ff",
|
||||||
|
"#ansiturquoise": "00ffff",
|
||||||
|
"#ansiwhite": "ffffff",
|
||||||
|
|
||||||
|
// Aliases without the "ansi" prefix, because...why?
|
||||||
|
"#black": "000000",
|
||||||
|
"#darkred": "7f0000",
|
||||||
|
"#darkgreen": "007f00",
|
||||||
|
"#brown": "7f7fe0",
|
||||||
|
"#darkblue": "00007f",
|
||||||
|
"#purple": "7f007f",
|
||||||
|
"#teal": "007f7f",
|
||||||
|
"#lightgray": "e5e5e5",
|
||||||
|
// Normal
|
||||||
|
"#darkgray": "555555",
|
||||||
|
"#red": "ff0000",
|
||||||
|
"#green": "00ff00",
|
||||||
|
"#yellow": "ffff00",
|
||||||
|
"#blue": "0000ff",
|
||||||
|
"#fuchsia": "ff00ff",
|
||||||
|
"#turquoise": "00ffff",
|
||||||
|
"#white": "ffffff",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Colour represents an RGB colour.
|
||||||
|
type Colour int32
|
||||||
|
|
||||||
|
// NewColour creates a Colour directly from RGB values.
|
||||||
|
func NewColour(r, g, b uint8) Colour {
|
||||||
|
return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Distance between this colour and another.
|
||||||
|
//
|
||||||
|
// This uses the approach described here (https://www.compuphase.com/cmetric.htm).
|
||||||
|
// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs.
|
||||||
|
func (c Colour) Distance(e2 Colour) float64 {
|
||||||
|
ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue())
|
||||||
|
br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue())
|
||||||
|
rmean := (ar + br) / 2
|
||||||
|
r := ar - br
|
||||||
|
g := ag - bg
|
||||||
|
b := ab - bb
|
||||||
|
return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Brighten returns a copy of this colour with its brightness adjusted.
|
||||||
|
//
|
||||||
|
// If factor is negative, the colour is darkened.
|
||||||
|
//
|
||||||
|
// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
|
||||||
|
func (c Colour) Brighten(factor float64) Colour {
|
||||||
|
r := float64(c.Red())
|
||||||
|
g := float64(c.Green())
|
||||||
|
b := float64(c.Blue())
|
||||||
|
|
||||||
|
if factor < 0 {
|
||||||
|
factor++
|
||||||
|
r *= factor
|
||||||
|
g *= factor
|
||||||
|
b *= factor
|
||||||
|
} else {
|
||||||
|
r = (255-r)*factor + r
|
||||||
|
g = (255-g)*factor + g
|
||||||
|
b = (255-b)*factor + b
|
||||||
|
}
|
||||||
|
return NewColour(uint8(r), uint8(g), uint8(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
// BrightenOrDarken brightens a colour if it is < 0.5 brighteness or darkens if > 0.5 brightness.
|
||||||
|
func (c Colour) BrightenOrDarken(factor float64) Colour {
|
||||||
|
if c.Brightness() < 0.5 {
|
||||||
|
return c.Brighten(factor)
|
||||||
|
}
|
||||||
|
return c.Brighten(-factor)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Brightness of the colour (roughly) in the range 0.0 to 1.0
|
||||||
|
func (c Colour) Brightness() float64 {
|
||||||
|
return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>.
|
||||||
|
// Will return an "unset" colour if invalid.
|
||||||
|
func ParseColour(colour string) Colour {
|
||||||
|
colour = normaliseColour(colour)
|
||||||
|
n, err := strconv.ParseUint(colour, 16, 32)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return Colour(n + 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustParseColour is like ParseColour except it panics if the colour is invalid.
|
||||||
|
//
|
||||||
|
// Will panic if colour is in an invalid format.
|
||||||
|
func MustParseColour(colour string) Colour {
|
||||||
|
parsed := ParseColour(colour)
|
||||||
|
if !parsed.IsSet() {
|
||||||
|
panic(fmt.Errorf("invalid colour %q", colour))
|
||||||
|
}
|
||||||
|
return parsed
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsSet returns true if the colour is set.
|
||||||
|
func (c Colour) IsSet() bool { return c != 0 }
|
||||||
|
|
||||||
|
func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) }
|
||||||
|
func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) }
|
||||||
|
|
||||||
|
// Red component of colour.
|
||||||
|
func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) }
|
||||||
|
|
||||||
|
// Green component of colour.
|
||||||
|
func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) }
|
||||||
|
|
||||||
|
// Blue component of colour.
|
||||||
|
func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) }
|
||||||
|
|
||||||
|
// Colours is an orderable set of colours.
|
||||||
|
type Colours []Colour
|
||||||
|
|
||||||
|
func (c Colours) Len() int { return len(c) }
|
||||||
|
func (c Colours) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
|
||||||
|
func (c Colours) Less(i, j int) bool { return c[i] < c[j] }
|
||||||
|
|
||||||
|
// Convert colours to #rrggbb.
|
||||||
|
func normaliseColour(colour string) string {
|
||||||
|
if ansi, ok := ANSI2RGB[colour]; ok {
|
||||||
|
return ansi
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(colour, "#") {
|
||||||
|
colour = colour[1:]
|
||||||
|
if len(colour) == 3 {
|
||||||
|
return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return colour
|
||||||
|
}
|
137
vendor/github.com/alecthomas/chroma/delegate.go
generated
vendored
Normal file
137
vendor/github.com/alecthomas/chroma/delegate.go
generated
vendored
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
)
|
||||||
|
|
||||||
|
type delegatingLexer struct {
|
||||||
|
root Lexer
|
||||||
|
language Lexer
|
||||||
|
}
|
||||||
|
|
||||||
|
// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP
|
||||||
|
// inside HTML or PHP inside plain text.
|
||||||
|
//
|
||||||
|
// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language
|
||||||
|
// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer.
|
||||||
|
// Finally, these two sets of tokens are merged.
|
||||||
|
//
|
||||||
|
// The lexers from the template lexer package use this base lexer.
|
||||||
|
func DelegatingLexer(root Lexer, language Lexer) Lexer {
|
||||||
|
return &delegatingLexer{
|
||||||
|
root: root,
|
||||||
|
language: language,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *delegatingLexer) Config() *Config {
|
||||||
|
return d.language.Config()
|
||||||
|
}
|
||||||
|
|
||||||
|
// An insertion is the character range where language tokens should be inserted.
|
||||||
|
type insertion struct {
|
||||||
|
start, end int
|
||||||
|
tokens []Token
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
|
||||||
|
tokens, err := Tokenise(Coalesce(d.language), options, text)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// Compute insertions and gather "Other" tokens.
|
||||||
|
others := &bytes.Buffer{}
|
||||||
|
insertions := []*insertion{}
|
||||||
|
var insert *insertion
|
||||||
|
offset := 0
|
||||||
|
var last Token
|
||||||
|
for _, t := range tokens {
|
||||||
|
if t.Type == Other {
|
||||||
|
if last != EOF && insert != nil && last.Type != Other {
|
||||||
|
insert.end = offset
|
||||||
|
}
|
||||||
|
others.WriteString(t.Value)
|
||||||
|
} else {
|
||||||
|
if last == EOF || last.Type == Other {
|
||||||
|
insert = &insertion{start: offset}
|
||||||
|
insertions = append(insertions, insert)
|
||||||
|
}
|
||||||
|
insert.tokens = append(insert.tokens, t)
|
||||||
|
}
|
||||||
|
last = t
|
||||||
|
offset += len(t.Value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(insertions) == 0 {
|
||||||
|
return d.root.Tokenise(options, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lex the other tokens.
|
||||||
|
rootTokens, err := Tokenise(Coalesce(d.root), options, others.String())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interleave the two sets of tokens.
|
||||||
|
var out []Token
|
||||||
|
offset = 0 // Offset into text.
|
||||||
|
tokenIndex := 0
|
||||||
|
nextToken := func() Token {
|
||||||
|
if tokenIndex >= len(rootTokens) {
|
||||||
|
return EOF
|
||||||
|
}
|
||||||
|
t := rootTokens[tokenIndex]
|
||||||
|
tokenIndex++
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
insertionIndex := 0
|
||||||
|
nextInsertion := func() *insertion {
|
||||||
|
if insertionIndex >= len(insertions) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
i := insertions[insertionIndex]
|
||||||
|
insertionIndex++
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
t := nextToken()
|
||||||
|
i := nextInsertion()
|
||||||
|
for t != EOF || i != nil {
|
||||||
|
// fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
|
||||||
|
if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
|
||||||
|
var l Token
|
||||||
|
l, t = splitToken(t, i.start-offset)
|
||||||
|
if l != EOF {
|
||||||
|
out = append(out, l)
|
||||||
|
offset += len(l.Value)
|
||||||
|
}
|
||||||
|
out = append(out, i.tokens...)
|
||||||
|
offset += i.end - i.start
|
||||||
|
if t == EOF {
|
||||||
|
t = nextToken()
|
||||||
|
}
|
||||||
|
i = nextInsertion()
|
||||||
|
} else {
|
||||||
|
out = append(out, t)
|
||||||
|
offset += len(t.Value)
|
||||||
|
t = nextToken()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Literator(out...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func splitToken(t Token, offset int) (l Token, r Token) {
|
||||||
|
if t == EOF {
|
||||||
|
return EOF, EOF
|
||||||
|
}
|
||||||
|
if offset == 0 {
|
||||||
|
return EOF, t
|
||||||
|
}
|
||||||
|
if offset == len(t.Value) {
|
||||||
|
return t, EOF
|
||||||
|
}
|
||||||
|
l = t.Clone()
|
||||||
|
r = t.Clone()
|
||||||
|
l.Value = l.Value[:offset]
|
||||||
|
r.Value = r.Value[offset:]
|
||||||
|
return
|
||||||
|
}
|
7
vendor/github.com/alecthomas/chroma/doc.go
generated
vendored
Normal file
7
vendor/github.com/alecthomas/chroma/doc.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-
|
||||||
|
// coloured text, etc.
|
||||||
|
//
|
||||||
|
// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
|
||||||
|
//
|
||||||
|
// For more information, go here: https://github.com/alecthomas/chroma
|
||||||
|
package chroma
|
43
vendor/github.com/alecthomas/chroma/formatter.go
generated
vendored
Normal file
43
vendor/github.com/alecthomas/chroma/formatter.go
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// A Formatter for Chroma lexers.
|
||||||
|
type Formatter interface {
|
||||||
|
// Format returns a formatting function for tokens.
|
||||||
|
//
|
||||||
|
// If the iterator panics, the Formatter should recover.
|
||||||
|
Format(w io.Writer, style *Style, iterator Iterator) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// A FormatterFunc is a Formatter implemented as a function.
|
||||||
|
//
|
||||||
|
// Guards against iterator panics.
|
||||||
|
type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error
|
||||||
|
|
||||||
|
func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint
|
||||||
|
defer func() {
|
||||||
|
if perr := recover(); perr != nil {
|
||||||
|
err = perr.(error)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
return f(w, s, it)
|
||||||
|
}
|
||||||
|
|
||||||
|
type recoveringFormatter struct {
|
||||||
|
Formatter
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) {
|
||||||
|
defer func() {
|
||||||
|
if perr := recover(); perr != nil {
|
||||||
|
err = perr.(error)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
return r.Formatter.Format(w, s, it)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecoveringFormatter wraps a formatter with panic recovery.
|
||||||
|
func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} }
|
500
vendor/github.com/alecthomas/chroma/formatters/html/html.go
generated
vendored
Normal file
500
vendor/github.com/alecthomas/chroma/formatters/html/html.go
generated
vendored
Normal file
@ -0,0 +1,500 @@
|
|||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"html"
|
||||||
|
"io"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/alecthomas/chroma"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Option sets an option of the HTML formatter.
|
||||||
|
type Option func(f *Formatter)
|
||||||
|
|
||||||
|
// Standalone configures the HTML formatter for generating a standalone HTML document.
|
||||||
|
func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } }
|
||||||
|
|
||||||
|
// ClassPrefix sets the CSS class prefix.
|
||||||
|
func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } }
|
||||||
|
|
||||||
|
// WithClasses emits HTML using CSS classes, rather than inline styles.
|
||||||
|
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
|
||||||
|
|
||||||
|
// WithAllClasses disables an optimisation that omits redundant CSS classes.
|
||||||
|
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
|
||||||
|
|
||||||
|
// TabWidth sets the number of characters for a tab. Defaults to 8.
|
||||||
|
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
|
||||||
|
|
||||||
|
// PreventSurroundingPre prevents the surrounding pre tags around the generated code.
|
||||||
|
func PreventSurroundingPre(b bool) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
if b {
|
||||||
|
f.preWrapper = nopPreWrapper
|
||||||
|
} else {
|
||||||
|
f.preWrapper = defaultPreWrapper
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithPreWrapper allows control of the surrounding pre tags.
|
||||||
|
func WithPreWrapper(wrapper PreWrapper) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.preWrapper = wrapper
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WrapLongLines wraps long lines.
|
||||||
|
func WrapLongLines(b bool) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.wrapLongLines = b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithLineNumbers formats output with line numbers.
|
||||||
|
func WithLineNumbers(b bool) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.lineNumbers = b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers
|
||||||
|
// and code in table td's, which make them copy-and-paste friendly.
|
||||||
|
func LineNumbersInTable(b bool) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.lineNumbersInTable = b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// LinkableLineNumbers decorates the line numbers HTML elements with an "id"
|
||||||
|
// attribute so they can be linked.
|
||||||
|
func LinkableLineNumbers(b bool, prefix string) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.linkableLineNumbers = b
|
||||||
|
f.lineNumbersIDPrefix = prefix
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HighlightLines higlights the given line ranges with the Highlight style.
|
||||||
|
//
|
||||||
|
// A range is the beginning and ending of a range as 1-based line numbers, inclusive.
|
||||||
|
func HighlightLines(ranges [][2]int) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.highlightRanges = ranges
|
||||||
|
sort.Sort(f.highlightRanges)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1.
|
||||||
|
func BaseLineNumber(n int) Option {
|
||||||
|
return func(f *Formatter) {
|
||||||
|
f.baseLineNumber = n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// New HTML formatter.
|
||||||
|
func New(options ...Option) *Formatter {
|
||||||
|
f := &Formatter{
|
||||||
|
baseLineNumber: 1,
|
||||||
|
preWrapper: defaultPreWrapper,
|
||||||
|
}
|
||||||
|
for _, option := range options {
|
||||||
|
option(f)
|
||||||
|
}
|
||||||
|
return f
|
||||||
|
}
|
||||||
|
|
||||||
|
// PreWrapper defines the operations supported in WithPreWrapper.
//
// Implementations emit the opening and closing markup placed around the
// formatter's output (by default a <pre>/<code> pair).
type PreWrapper interface {
	// Start is called to write a start <pre> element.
	// The code flag tells whether this block surrounds
	// highlighted code. This will be false when surrounding
	// line numbers.
	Start(code bool, styleAttr string) string

	// End is called to write the end </pre> element.
	End(code bool) string
}
|
||||||
|
|
||||||
|
// preWrapper adapts a pair of closures to the PreWrapper interface.
type preWrapper struct {
	start func(code bool, styleAttr string) string
	end   func(code bool) string
}

// Start delegates to the start closure.
func (w preWrapper) Start(code bool, styleAttr string) string { return w.start(code, styleAttr) }

// End delegates to the end closure.
func (w preWrapper) End(code bool) string { return w.end(code) }
|
||||||
|
|
||||||
|
var (
|
||||||
|
nopPreWrapper = preWrapper{
|
||||||
|
start: func(code bool, styleAttr string) string { return "" },
|
||||||
|
end: func(code bool) string { return "" },
|
||||||
|
}
|
||||||
|
defaultPreWrapper = preWrapper{
|
||||||
|
start: func(code bool, styleAttr string) string {
|
||||||
|
if code {
|
||||||
|
return fmt.Sprintf(`<pre tabindex="0"%s><code>`, styleAttr)
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf(`<pre tabindex="0"%s>`, styleAttr)
|
||||||
|
},
|
||||||
|
end: func(code bool) string {
|
||||||
|
if code {
|
||||||
|
return `</code></pre>`
|
||||||
|
}
|
||||||
|
|
||||||
|
return `</pre>`
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
// Formatter that generates HTML.
type Formatter struct {
	standalone bool // wrap output in a full <html>/<body> document
	prefix     string // prefix prepended to every emitted CSS class
	Classes    bool // Exported field to detect when classes are being used
	allClasses bool // emit classes even for zero (empty) style entries
	preWrapper PreWrapper // markup placed around the output; defaults to defaultPreWrapper
	tabWidth   int // tab size for the tab-size CSS rule; 0 or 8 means default
	wrapLongLines bool // add pre-wrap/break-word CSS to the PreWrapper style
	lineNumbers bool // emit line numbers
	lineNumbersInTable bool // put line numbers in a separate table column
	linkableLineNumbers bool // give each line number an id attribute and self-link
	lineNumbersIDPrefix string // prefix for the generated line-number ids
	highlightRanges highlightRanges // sorted, inclusive 1-based line ranges to highlight
	baseLineNumber int // number of the first line; defaults to 1
}
|
||||||
|
|
||||||
|
// highlightRanges is a set of inclusive [start, end] line ranges, sortable by
// start line for forward scanning.
type highlightRanges [][2]int

func (r highlightRanges) Len() int           { return len(r) }
func (r highlightRanges) Swap(i, j int)      { r[i], r[j] = r[j], r[i] }
func (r highlightRanges) Less(i, j int) bool { return r[i][0] < r[j][0] }
|
||||||
|
|
||||||
|
// Format implements the chroma formatter entry point: it drains the iterator
// (via iterator.Tokens()) and renders all tokens to w with the given style.
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
	return f.writeHTML(w, style, iterator.Tokens())
}
|
||||||
|
|
||||||
|
// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked).
|
||||||
|
//
|
||||||
|
// OTOH we need to be super careful about correct escaping...
|
||||||
|
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
|
||||||
|
css := f.styleToCSS(style)
|
||||||
|
if !f.Classes {
|
||||||
|
for t, style := range css {
|
||||||
|
css[t] = compressStyle(style)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if f.standalone {
|
||||||
|
fmt.Fprint(w, "<html>\n")
|
||||||
|
if f.Classes {
|
||||||
|
fmt.Fprint(w, "<style type=\"text/css\">\n")
|
||||||
|
err = f.WriteCSS(w, style)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background])
|
||||||
|
fmt.Fprint(w, "</style>")
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "<body%s>\n", f.styleAttr(css, chroma.Background))
|
||||||
|
}
|
||||||
|
|
||||||
|
wrapInTable := f.lineNumbers && f.lineNumbersInTable
|
||||||
|
|
||||||
|
lines := chroma.SplitTokensIntoLines(tokens)
|
||||||
|
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
|
||||||
|
highlightIndex := 0
|
||||||
|
|
||||||
|
if wrapInTable {
|
||||||
|
// List line numbers in its own <td>
|
||||||
|
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
|
||||||
|
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
|
||||||
|
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
|
||||||
|
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
|
||||||
|
for index := range lines {
|
||||||
|
line := f.baseLineNumber + index
|
||||||
|
highlight, next := f.shouldHighlight(highlightIndex, line)
|
||||||
|
if next {
|
||||||
|
highlightIndex++
|
||||||
|
}
|
||||||
|
if highlight {
|
||||||
|
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
|
||||||
|
|
||||||
|
if highlight {
|
||||||
|
fmt.Fprintf(w, "</span>")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, f.preWrapper.End(false))
|
||||||
|
fmt.Fprint(w, "</td>\n")
|
||||||
|
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
|
||||||
|
|
||||||
|
highlightIndex = 0
|
||||||
|
for index, tokens := range lines {
|
||||||
|
// 1-based line number.
|
||||||
|
line := f.baseLineNumber + index
|
||||||
|
highlight, next := f.shouldHighlight(highlightIndex, line)
|
||||||
|
if next {
|
||||||
|
highlightIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start of Line
|
||||||
|
fmt.Fprint(w, `<span`)
|
||||||
|
if highlight {
|
||||||
|
// Line + LineHighlight
|
||||||
|
if f.Classes {
|
||||||
|
fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight))
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, `>`)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line number
|
||||||
|
if f.lineNumbers && !wrapInTable {
|
||||||
|
fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine))
|
||||||
|
|
||||||
|
for _, token := range tokens {
|
||||||
|
html := html.EscapeString(token.String())
|
||||||
|
attr := f.styleAttr(css, token.Type)
|
||||||
|
if attr != "" {
|
||||||
|
html = fmt.Sprintf("<span%s>%s</span>", attr, html)
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, html)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprint(w, `</span>`) // End of CodeLine
|
||||||
|
|
||||||
|
fmt.Fprint(w, `</span>`) // End of Line
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, f.preWrapper.End(true))
|
||||||
|
|
||||||
|
if wrapInTable {
|
||||||
|
fmt.Fprint(w, "</td></tr></table>\n")
|
||||||
|
fmt.Fprint(w, "</div>\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.standalone {
|
||||||
|
fmt.Fprint(w, "\n</body>\n")
|
||||||
|
fmt.Fprint(w, "</html>\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *Formatter) lineIDAttribute(line int) string {
|
||||||
|
if !f.linkableLineNumbers {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
|
||||||
|
title := fmt.Sprintf("%*d", lineDigits, line)
|
||||||
|
if !f.linkableLineNumbers {
|
||||||
|
return title
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *Formatter) lineID(line int) string {
|
||||||
|
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
|
||||||
|
next := false
|
||||||
|
for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] {
|
||||||
|
highlightIndex++
|
||||||
|
next = true
|
||||||
|
}
|
||||||
|
if highlightIndex < len(f.highlightRanges) {
|
||||||
|
hrange := f.highlightRanges[highlightIndex]
|
||||||
|
if line >= hrange[0] && line <= hrange[1] {
|
||||||
|
return true, next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false, next
|
||||||
|
}
|
||||||
|
|
||||||
|
// class returns the prefixed CSS class for a token type, walking up the
// token's parent chain (t.Parent()) until a StandardTypes entry is found.
// An entry mapping to "" explicitly suppresses the class.
func (f *Formatter) class(t chroma.TokenType) string {
	for t != 0 {
		if cls, ok := chroma.StandardTypes[t]; ok {
			if cls != "" {
				return f.prefix + cls
			}
			return ""
		}
		t = t.Parent()
	}
	// Fall back to the class registered for token type 0, if any.
	if cls := chroma.StandardTypes[t]; cls != "" {
		return f.prefix + cls
	}
	return ""
}
|
||||||
|
|
||||||
|
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string {
|
||||||
|
if f.Classes {
|
||||||
|
cls := f.class(tt)
|
||||||
|
if cls == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(` class="%s"`, cls)
|
||||||
|
}
|
||||||
|
if _, ok := styles[tt]; !ok {
|
||||||
|
tt = tt.SubCategory()
|
||||||
|
if _, ok := styles[tt]; !ok {
|
||||||
|
tt = tt.Category()
|
||||||
|
if _, ok := styles[tt]; !ok {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
css := []string{styles[tt]}
|
||||||
|
css = append(css, extraCSS...)
|
||||||
|
return fmt.Sprintf(` style="%s"`, strings.Join(css, ";"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *Formatter) tabWidthStyle() string {
|
||||||
|
if f.tabWidth != 0 && f.tabWidth != 8 {
|
||||||
|
return fmt.Sprintf("; -moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d", f.tabWidth)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteCSS writes CSS style definitions (without any surrounding HTML).
|
||||||
|
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
|
||||||
|
css := f.styleToCSS(style)
|
||||||
|
// Special-case background as it is mapped to the outer ".chroma" class.
|
||||||
|
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Special-case PreWrapper as it is the ".chroma" class.
|
||||||
|
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Special-case code column of table to expand width.
|
||||||
|
if f.lineNumbers && f.lineNumbersInTable {
|
||||||
|
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }",
|
||||||
|
chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Special-case line number highlighting when targeted.
|
||||||
|
if f.lineNumbers || f.lineNumbersInTable {
|
||||||
|
targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight))
|
||||||
|
for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} {
|
||||||
|
fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tts := []int{}
|
||||||
|
for tt := range css {
|
||||||
|
tts = append(tts, int(tt))
|
||||||
|
}
|
||||||
|
sort.Ints(tts)
|
||||||
|
for _, ti := range tts {
|
||||||
|
tt := chroma.TokenType(ti)
|
||||||
|
switch tt {
|
||||||
|
case chroma.Background, chroma.PreWrapper:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
class := f.class(tt)
|
||||||
|
if class == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
styles := css[tt]
|
||||||
|
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// styleToCSS converts a chroma style into a map of token type to CSS rule
// text, then appends the synthetic layout rules (tab size, grid/wrap on the
// PreWrapper, line number and table styling). Order of concatenation matters:
// defaults come first so user-provided rules can override them.
func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string {
	classes := map[chroma.TokenType]string{}
	bg := style.Get(chroma.Background)
	// Convert the style.
	for t := range chroma.StandardTypes {
		entry := style.Get(t)
		if t != chroma.Background {
			// Only keep attributes that differ from the background entry.
			entry = entry.Sub(bg)
		}
		// Unless allClasses is set, skip entries that add nothing.
		if !f.allClasses && entry.IsZero() {
			continue
		}
		classes[t] = StyleEntryToCSS(entry)
	}
	classes[chroma.Background] += f.tabWidthStyle()
	classes[chroma.PreWrapper] += classes[chroma.Background] + `;`
	// Make PreWrapper a grid to show highlight style with full width.
	if len(f.highlightRanges) > 0 {
		classes[chroma.PreWrapper] += `display: grid;`
	}
	// Make PreWrapper wrap long lines.
	if f.wrapLongLines {
		classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
	}
	lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
	// All rules begin with default rules followed by user provided rules
	classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
	classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
	classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
	classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
	classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
	return classes
}
|
||||||
|
|
||||||
|
// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes.
|
||||||
|
func StyleEntryToCSS(e chroma.StyleEntry) string {
|
||||||
|
styles := []string{}
|
||||||
|
if e.Colour.IsSet() {
|
||||||
|
styles = append(styles, "color: "+e.Colour.String())
|
||||||
|
}
|
||||||
|
if e.Background.IsSet() {
|
||||||
|
styles = append(styles, "background-color: "+e.Background.String())
|
||||||
|
}
|
||||||
|
if e.Bold == chroma.Yes {
|
||||||
|
styles = append(styles, "font-weight: bold")
|
||||||
|
}
|
||||||
|
if e.Italic == chroma.Yes {
|
||||||
|
styles = append(styles, "font-style: italic")
|
||||||
|
}
|
||||||
|
if e.Underline == chroma.Yes {
|
||||||
|
styles = append(styles, "text-decoration: underline")
|
||||||
|
}
|
||||||
|
return strings.Join(styles, "; ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compress CSS attributes - remove spaces, transform 6-digit colours to 3.
//
// Bug fix: the colour-shortening slice p[len(p)-6:] panicked when a part
// containing "#" was shorter than six bytes; such parts are now left as-is.
func compressStyle(s string) string {
	parts := strings.Split(s, ";")
	out := []string{}
	for _, p := range parts {
		// Collapse internal whitespace and drop the space after ":".
		p = strings.Join(strings.Fields(p), " ")
		p = strings.Replace(p, ": ", ":", 1)
		// Shorten a trailing #rrggbb to #rgb when each channel repeats.
		if strings.Contains(p, "#") && len(p) >= 6 {
			c := p[len(p)-6:]
			if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] {
				p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5]
			}
		}
		out = append(out, p)
	}
	return strings.Join(out, ";")
}
|
9
vendor/github.com/alecthomas/chroma/go.mod
generated
vendored
Normal file
9
vendor/github.com/alecthomas/chroma/go.mod
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
module github.com/alecthomas/chroma
|
||||||
|
|
||||||
|
go 1.13
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/dlclark/regexp2 v1.4.0
|
||||||
|
github.com/stretchr/testify v1.7.0
|
||||||
|
)
|
14
vendor/github.com/alecthomas/chroma/go.sum
generated
vendored
Normal file
14
vendor/github.com/alecthomas/chroma/go.sum
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
||||||
|
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
76
vendor/github.com/alecthomas/chroma/iterator.go
generated
vendored
Normal file
76
vendor/github.com/alecthomas/chroma/iterator.go
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// An Iterator across tokens.
|
||||||
|
//
|
||||||
|
// EOF will be returned at the end of the Token stream.
|
||||||
|
//
|
||||||
|
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
|
||||||
|
type Iterator func() Token
|
||||||
|
|
||||||
|
// Tokens consumes all tokens from the iterator and returns them as a slice.
|
||||||
|
func (i Iterator) Tokens() []Token {
|
||||||
|
var out []Token
|
||||||
|
for t := i(); t != EOF; t = i() {
|
||||||
|
out = append(out, t)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// Concaterator concatenates tokens from a series of iterators.
|
||||||
|
func Concaterator(iterators ...Iterator) Iterator {
|
||||||
|
return func() Token {
|
||||||
|
for len(iterators) > 0 {
|
||||||
|
t := iterators[0]()
|
||||||
|
if t != EOF {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
iterators = iterators[1:]
|
||||||
|
}
|
||||||
|
return EOF
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Literator converts a sequence of literal Tokens into an Iterator.
|
||||||
|
func Literator(tokens ...Token) Iterator {
|
||||||
|
return func() Token {
|
||||||
|
if len(tokens) == 0 {
|
||||||
|
return EOF
|
||||||
|
}
|
||||||
|
token := tokens[0]
|
||||||
|
tokens = tokens[1:]
|
||||||
|
return token
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SplitTokensIntoLines splits tokens containing newlines in two, returning one
// token slice per line. Each emitted line keeps its trailing "\n" inside the
// last token; a token drained to "" by its final newline still starts the next
// line, which is why the trailing empty line is stripped at the end.
func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
	var line []Token // nolint: prealloc
	for _, token := range tokens {
		// Split the token at each newline, keeping the newline with the head.
		for strings.Contains(token.Value, "\n") {
			parts := strings.SplitAfterN(token.Value, "\n", 2)
			// Token becomes the tail.
			token.Value = parts[1]

			// Append the head to the line and flush the line.
			clone := token.Clone()
			clone.Value = parts[0]
			line = append(line, clone)
			out = append(out, line)
			line = nil
		}
		line = append(line, token)
	}
	if len(line) > 0 {
		out = append(out, line)
	}
	// Strip empty trailing token line.
	if len(out) > 0 {
		last := out[len(out)-1]
		if len(last) == 1 && last[0].Value == "" {
			out = out[:len(out)-1]
		}
	}
	return
}
|
128
vendor/github.com/alecthomas/chroma/lexer.go
generated
vendored
Normal file
128
vendor/github.com/alecthomas/chroma/lexer.go
generated
vendored
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
package chroma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
defaultOptions = &TokeniseOptions{
|
||||||
|
State: "root",
|
||||||
|
EnsureLF: true,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config for a lexer.
//
// Fields commented out below exist in the Pygments model this was derived
// from but are not implemented here.
type Config struct {
	// Name of the lexer.
	Name string

	// Shortcuts for the lexer
	Aliases []string

	// File name globs
	Filenames []string

	// Secondary file name globs
	AliasFilenames []string

	// MIME types
	MimeTypes []string

	// Regex matching is case-insensitive.
	CaseInsensitive bool

	// Regex matches all characters.
	DotAll bool

	// Regex does not match across lines ($ matches EOL).
	//
	// Defaults to multiline.
	NotMultiline bool

	// Don't strip leading and trailing newlines from the input.
	// DontStripNL bool

	// Strip all leading and trailing whitespace from the input
	// StripAll bool

	// Make sure that the input ends with a newline. This
	// is required for some lexers that consume input linewise.
	EnsureNL bool

	// If given and greater than 0, expand tabs in the input.
	// TabSize int

	// Priority of lexer.
	//
	// If this is 0 it will be treated as a default of 1.
	Priority float32
}
|
||||||
|
|
||||||
|
// Token output to formatter.
type Token struct {
	Type  TokenType `json:"type"`
	Value string    `json:"value"`
}

// String returns the token's raw text.
func (t *Token) String() string   { return t.Value }
func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

// Clone returns a clone of the Token (a shallow copy; Token has no reference fields).
func (t *Token) Clone() Token {
	return *t
}

// EOF is returned by lexers at the end of input.
var EOF Token
|
||||||
|
|
||||||
|
// TokeniseOptions contains options for tokenisers.
type TokeniseOptions struct {
	// State to start tokenisation in. Defaults to "root".
	State string
	// Nested tokenisation.
	Nested bool

	// If true, all EOLs are converted into LF
	// by replacing CRLF and CR
	EnsureLF bool
}
|
||||||
|
|
||||||
|
// A Lexer for tokenising source code.
type Lexer interface {
	// Config describing the features of the Lexer.
	Config() *Config
	// Tokenise returns an Iterator over tokens in text.
	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
}
|
||||||
|
|
||||||
|
// Lexers is a slice of lexers sortable by name.
|
||||||
|
type Lexers []Lexer
|
||||||
|
|
||||||
|
func (l Lexers) Len() int { return len(l) }
|
||||||
|
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||||
|
func (l Lexers) Less(i, j int) bool {
|
||||||
|
return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrioritisedLexers is a slice of lexers sortable by priority.
|
||||||
|
type PrioritisedLexers []Lexer
|
||||||
|
|
||||||
|
func (l PrioritisedLexers) Len() int { return len(l) }
|
||||||
|
func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||||
|
func (l PrioritisedLexers) Less(i, j int) bool {
|
||||||
|
ip := l[i].Config().Priority
|
||||||
|
if ip == 0 {
|
||||||
|
ip = 1
|
||||||
|
}
|
||||||
|
jp := l[j].Config().Priority
|
||||||
|
if jp == 0 {
|
||||||
|
jp = 1
|
||||||
|
}
|
||||||
|
return ip > jp
|
||||||
|
}
|
||||||
|
|
||||||
|
// Analyser determines how appropriate this lexer is for the given text.
type Analyser interface {
	// AnalyseText scores the text for this lexer; higher means more appropriate.
	AnalyseText(text string) float32
}
|
40
vendor/github.com/alecthomas/chroma/lexers/README.md
generated
vendored
Normal file
40
vendor/github.com/alecthomas/chroma/lexers/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# Lexer tests
|
||||||
|
|
||||||
|
The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
|
||||||
|
that its output matches `<name>.expected`.
|
||||||
|
|
||||||
|
It is also possible to perform several tests on the same parser `<name>`, by placing known inputs `*.actual` into a
|
||||||
|
directory `testdata/<name>/`.
|
||||||
|
|
||||||
|
## Running the tests
|
||||||
|
|
||||||
|
Run the tests as normal:
|
||||||
|
```go
|
||||||
|
go test ./lexers
|
||||||
|
```
|
||||||
|
|
||||||
|
## Update existing tests
|
||||||
|
When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer.
|
||||||
|
|
||||||
|
To regenerate all tests, type in your terminal:
|
||||||
|
|
||||||
|
```go
|
||||||
|
RECORD=true go test ./lexers
|
||||||
|
```
|
||||||
|
|
||||||
|
This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project.
|
||||||
|
|
||||||
|
(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.)
|
||||||
|
|
||||||
|
### Windows users
|
||||||
|
Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell.
|
||||||
|
|
||||||
|
Instead we have to perform both steps separately:
|
||||||
|
|
||||||
|
- Set the `RECORD` environment variable to `true`.
|
||||||
|
+ In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more.
|
||||||
|
+ In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more.
|
||||||
|
+ You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how.
|
||||||
|
- When the environment variable is set, run `go test ./lexers`.
|
||||||
|
|
||||||
|
Chroma will now regenerate the test files and print its results to the console window.
|
60
vendor/github.com/alecthomas/chroma/lexers/a/abap.go
generated
vendored
Normal file
60
vendor/github.com/alecthomas/chroma/lexers/a/abap.go
generated
vendored
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ABAP lexer.
|
||||||
|
var Abap = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "ABAP",
|
||||||
|
Aliases: []string{"abap"},
|
||||||
|
Filenames: []string{"*.abap", "*.ABAP"},
|
||||||
|
MimeTypes: []string{"text/x-abap"},
|
||||||
|
CaseInsensitive: true,
|
||||||
|
},
|
||||||
|
abapRules,
|
||||||
|
))
|
||||||
|
|
||||||
|
func abapRules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"common": {
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`^\*.*$`, CommentSingle, nil},
|
||||||
|
{`\".*?\n`, CommentSingle, nil},
|
||||||
|
{`##\w+`, CommentSpecial, nil},
|
||||||
|
},
|
||||||
|
"variable-names": {
|
||||||
|
{`<\S+>`, NameVariable, nil},
|
||||||
|
{`\w[\w~]*(?:(\[\])|->\*)?`, NameVariable, nil},
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
Include("common"),
|
||||||
|
{`CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)`, Keyword, nil},
|
||||||
|
{`(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b`, Keyword, nil},
|
||||||
|
{`(FORM|PERFORM)(\s+)(\w+)`, ByGroups(Keyword, Text, NameFunction), nil},
|
||||||
|
{`(PERFORM)(\s+)(\()(\w+)(\))`, ByGroups(Keyword, Text, Punctuation, NameVariable, Punctuation), nil},
|
||||||
|
{`(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)`, ByGroups(Keyword, Text, NameFunction, Text, Keyword), nil},
|
||||||
|
{`(METHOD)(\s+)([\w~]+)`, ByGroups(Keyword, Text, NameFunction), nil},
|
||||||
|
{`(\s+)([\w\-]+)([=\-]>)([\w\-~]+)`, ByGroups(Text, NameVariable, Operator, NameFunction), nil},
|
||||||
|
{`(?<=(=|-)>)([\w\-~]+)(?=\()`, NameFunction, nil},
|
||||||
|
{`(TEXT)(-)(\d{3})`, ByGroups(Keyword, Punctuation, LiteralNumberInteger), nil},
|
||||||
|
{`(TEXT)(-)(\w{3})`, ByGroups(Keyword, Punctuation, NameVariable), nil},
|
||||||
|
{`(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b`, Keyword, nil},
|
||||||
|
{`(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO 
TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b`, Keyword, nil},
|
||||||
|
{`(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b`, Keyword, nil},
|
||||||
|
{`(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b`, ByGroups(NameBuiltin, Punctuation), nil},
|
||||||
|
{`&[0-9]`, Name, nil},
|
||||||
|
{`[0-9]+`, LiteralNumberInteger, nil},
|
||||||
|
{`(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b`, OperatorWord, nil},
|
||||||
|
Include("variable-names"),
|
||||||
|
{`[?*<>=\-+&]`, Operator, nil},
|
||||||
|
{`'(''|[^'])*'`, LiteralStringSingle, nil},
|
||||||
|
{"`([^`])*`", LiteralStringSingle, nil},
|
||||||
|
{`([|}])([^{}|]*?)([|{])`, ByGroups(Punctuation, LiteralStringSingle, Punctuation), nil},
|
||||||
|
{`[/;:()\[\],.]`, Punctuation, nil},
|
||||||
|
{`(!)(\w+)`, ByGroups(Operator, Name), nil},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
42
vendor/github.com/alecthomas/chroma/lexers/a/abnf.go
generated
vendored
Normal file
42
vendor/github.com/alecthomas/chroma/lexers/a/abnf.go
generated
vendored
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Abnf lexer.
|
||||||
|
var Abnf = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "ABNF",
|
||||||
|
Aliases: []string{"abnf"},
|
||||||
|
Filenames: []string{"*.abnf"},
|
||||||
|
MimeTypes: []string{"text/x-abnf"},
|
||||||
|
},
|
||||||
|
abnfRules,
|
||||||
|
))
|
||||||
|
|
||||||
|
func abnfRules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"root": {
|
||||||
|
{`;.*$`, CommentSingle, nil},
|
||||||
|
{`(%[si])?"[^"]*"`, Literal, nil},
|
||||||
|
{`%b[01]+\-[01]+\b`, Literal, nil},
|
||||||
|
{`%b[01]+(\.[01]+)*\b`, Literal, nil},
|
||||||
|
{`%d[0-9]+\-[0-9]+\b`, Literal, nil},
|
||||||
|
{`%d[0-9]+(\.[0-9]+)*\b`, Literal, nil},
|
||||||
|
{`%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b`, Literal, nil},
|
||||||
|
{`%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b`, Literal, nil},
|
||||||
|
{`\b[0-9]+\*[0-9]+`, Operator, nil},
|
||||||
|
{`\b[0-9]+\*`, Operator, nil},
|
||||||
|
{`\b[0-9]+`, Operator, nil},
|
||||||
|
{`\*`, Operator, nil},
|
||||||
|
{Words(``, `\b`, `ALPHA`, `BIT`, `CHAR`, `CR`, `CRLF`, `CTL`, `DIGIT`, `DQUOTE`, `HEXDIG`, `HTAB`, `LF`, `LWSP`, `OCTET`, `SP`, `VCHAR`, `WSP`), Keyword, nil},
|
||||||
|
{`[a-zA-Z][a-zA-Z0-9-]+\b`, NameClass, nil},
|
||||||
|
{`(=/|=|/)`, Operator, nil},
|
||||||
|
{`[\[\]()]`, Punctuation, nil},
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`.`, Text, nil},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
43
vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go
generated
vendored
Normal file
43
vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Actionscript lexer.
|
||||||
|
var Actionscript = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "ActionScript",
|
||||||
|
Aliases: []string{"as", "actionscript"},
|
||||||
|
Filenames: []string{"*.as"},
|
||||||
|
MimeTypes: []string{"application/x-actionscript", "text/x-actionscript", "text/actionscript"},
|
||||||
|
NotMultiline: true,
|
||||||
|
DotAll: true,
|
||||||
|
},
|
||||||
|
actionscriptRules,
|
||||||
|
))
|
||||||
|
|
||||||
|
func actionscriptRules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"root": {
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`//.*?\n`, CommentSingle, nil},
|
||||||
|
{`/\*.*?\*/`, CommentMultiline, nil},
|
||||||
|
{`/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, nil},
|
||||||
|
{`[~^*!%&<>|+=:;,/?\\-]+`, Operator, nil},
|
||||||
|
{`[{}\[\]();.]+`, Punctuation, nil},
|
||||||
|
{Words(``, `\b`, `case`, `default`, `for`, `each`, `in`, `while`, `do`, `break`, `return`, `continue`, `if`, `else`, `throw`, `try`, `catch`, `var`, `with`, `new`, `typeof`, `arguments`, `instanceof`, `this`, `switch`), Keyword, nil},
|
||||||
|
{Words(``, `\b`, `class`, `public`, `final`, `internal`, `native`, `override`, `private`, `protected`, `static`, `import`, `extends`, `implements`, `interface`, `intrinsic`, `return`, `super`, `dynamic`, `function`, `const`, `get`, `namespace`, `package`, `set`), KeywordDeclaration, nil},
|
||||||
|
{`(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b`, KeywordConstant, nil},
|
||||||
|
{Words(``, `\b`, `Accessibility`, `AccessibilityProperties`, `ActionScriptVersion`, `ActivityEvent`, `AntiAliasType`, `ApplicationDomain`, `AsBroadcaster`, `Array`, `AsyncErrorEvent`, `AVM1Movie`, `BevelFilter`, `Bitmap`, `BitmapData`, `BitmapDataChannel`, `BitmapFilter`, `BitmapFilterQuality`, `BitmapFilterType`, `BlendMode`, `BlurFilter`, `Boolean`, `ByteArray`, `Camera`, `Capabilities`, `CapsStyle`, `Class`, `Color`, `ColorMatrixFilter`, `ColorTransform`, `ContextMenu`, `ContextMenuBuiltInItems`, `ContextMenuEvent`, `ContextMenuItem`, `ConvultionFilter`, `CSMSettings`, `DataEvent`, `Date`, `DefinitionError`, `DeleteObjectSample`, `Dictionary`, `DisplacmentMapFilter`, `DisplayObject`, `DisplacmentMapFilterMode`, `DisplayObjectContainer`, `DropShadowFilter`, `Endian`, `EOFError`, `Error`, `ErrorEvent`, `EvalError`, `Event`, `EventDispatcher`, `EventPhase`, `ExternalInterface`, `FileFilter`, `FileReference`, `FileReferenceList`, `FocusDirection`, `FocusEvent`, `Font`, `FontStyle`, `FontType`, `FrameLabel`, `FullScreenEvent`, `Function`, `GlowFilter`, `GradientBevelFilter`, `GradientGlowFilter`, `GradientType`, `Graphics`, `GridFitType`, `HTTPStatusEvent`, `IBitmapDrawable`, `ID3Info`, `IDataInput`, `IDataOutput`, `IDynamicPropertyOutputIDynamicPropertyWriter`, `IEventDispatcher`, `IExternalizable`, `IllegalOperationError`, `IME`, `IMEConversionMode`, `IMEEvent`, `int`, `InteractiveObject`, `InterpolationMethod`, `InvalidSWFError`, `InvokeEvent`, `IOError`, `IOErrorEvent`, `JointStyle`, `Key`, `Keyboard`, `KeyboardEvent`, `KeyLocation`, `LineScaleMode`, `Loader`, `LoaderContext`, `LoaderInfo`, `LoadVars`, `LocalConnection`, `Locale`, `Math`, `Matrix`, `MemoryError`, `Microphone`, `MorphShape`, `Mouse`, `MouseEvent`, `MovieClip`, `MovieClipLoader`, `Namespace`, `NetConnection`, `NetStatusEvent`, `NetStream`, `NewObjectSample`, `Number`, `Object`, `ObjectEncoding`, `PixelSnapping`, `Point`, `PrintJob`, `PrintJobOptions`, `PrintJobOrientation`, `ProgressEvent`, 
`Proxy`, `QName`, `RangeError`, `Rectangle`, `ReferenceError`, `RegExp`, `Responder`, `Sample`, `Scene`, `ScriptTimeoutError`, `Security`, `SecurityDomain`, `SecurityError`, `SecurityErrorEvent`, `SecurityPanel`, `Selection`, `Shape`, `SharedObject`, `SharedObjectFlushStatus`, `SimpleButton`, `Socket`, `Sound`, `SoundChannel`, `SoundLoaderContext`, `SoundMixer`, `SoundTransform`, `SpreadMethod`, `Sprite`, `StackFrame`, `StackOverflowError`, `Stage`, `StageAlign`, `StageDisplayState`, `StageQuality`, `StageScaleMode`, `StaticText`, `StatusEvent`, `String`, `StyleSheet`, `SWFVersion`, `SyncEvent`, `SyntaxError`, `System`, `TextColorType`, `TextField`, `TextFieldAutoSize`, `TextFieldType`, `TextFormat`, `TextFormatAlign`, `TextLineMetrics`, `TextRenderer`, `TextSnapshot`, `Timer`, `TimerEvent`, `Transform`, `TypeError`, `uint`, `URIError`, `URLLoader`, `URLLoaderDataFormat`, `URLRequest`, `URLRequestHeader`, `URLRequestMethod`, `URLStream`, `URLVariabeles`, `VerifyError`, `Video`, `XML`, `XMLDocument`, `XMLList`, `XMLNode`, `XMLNodeType`, `XMLSocket`, `XMLUI`), NameBuiltin, nil},
|
||||||
|
{Words(``, `\b`, `decodeURI`, `decodeURIComponent`, `encodeURI`, `escape`, `eval`, `isFinite`, `isNaN`, `isXMLName`, `clearInterval`, `fscommand`, `getTimer`, `getURL`, `getVersion`, `parseFloat`, `parseInt`, `setInterval`, `trace`, `updateAfterEvent`, `unescape`), NameFunction, nil},
|
||||||
|
{`[$a-zA-Z_]\w*`, NameOther, nil},
|
||||||
|
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
|
||||||
|
{`0x[0-9a-f]+`, LiteralNumberHex, nil},
|
||||||
|
{`[0-9]+`, LiteralNumberInteger, nil},
|
||||||
|
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
|
||||||
|
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
60
vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go
generated
vendored
Normal file
60
vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go
generated
vendored
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Actionscript 3 lexer.
|
||||||
|
var Actionscript3 = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "ActionScript 3",
|
||||||
|
Aliases: []string{"as3", "actionscript3"},
|
||||||
|
Filenames: []string{"*.as"},
|
||||||
|
MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
|
||||||
|
DotAll: true,
|
||||||
|
},
|
||||||
|
actionscript3Rules,
|
||||||
|
))
|
||||||
|
|
||||||
|
func actionscript3Rules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"root": {
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
|
||||||
|
{`(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)`, ByGroups(KeywordDeclaration, Text, Name, Text, Punctuation, Text, KeywordType), nil},
|
||||||
|
{`(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)`, ByGroups(Keyword, Text, NameNamespace, Text), nil},
|
||||||
|
{`(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()`, ByGroups(Keyword, Text, KeywordType, Text, Operator), nil},
|
||||||
|
{`//.*?\n`, CommentSingle, nil},
|
||||||
|
{`/\*.*?\*/`, CommentMultiline, nil},
|
||||||
|
{`/(\\\\|\\/|[^\n])*/[gisx]*`, LiteralStringRegex, nil},
|
||||||
|
{`(\.)([$a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil},
|
||||||
|
{`(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b`, Keyword, nil},
|
||||||
|
{`(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b`, KeywordDeclaration, nil},
|
||||||
|
{`(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b`, KeywordConstant, nil},
|
||||||
|
{`(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b`, NameFunction, nil},
|
||||||
|
{`[$a-zA-Z_]\w*`, Name, nil},
|
||||||
|
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
|
||||||
|
{`0x[0-9a-f]+`, LiteralNumberHex, nil},
|
||||||
|
{`[0-9]+`, LiteralNumberInteger, nil},
|
||||||
|
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
|
||||||
|
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
|
||||||
|
{`[~^*!%&<>|+=:;,/?\\{}\[\]().-]+`, Operator, nil},
|
||||||
|
},
|
||||||
|
"funcparams": {
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)`, ByGroups(Text, Punctuation, Name, Text, Operator, Text, KeywordType, Text), Push("defval")},
|
||||||
|
{`\)`, Operator, Push("type")},
|
||||||
|
},
|
||||||
|
"type": {
|
||||||
|
{`(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)`, ByGroups(Text, Operator, Text, KeywordType), Pop(2)},
|
||||||
|
{`\s+`, Text, Pop(2)},
|
||||||
|
Default(Pop(2)),
|
||||||
|
},
|
||||||
|
"defval": {
|
||||||
|
{`(=)(\s*)([^(),]+)(\s*)(,?)`, ByGroups(Operator, Text, UsingSelf("root"), Text, Operator), Pop(1)},
|
||||||
|
{`,`, Operator, Pop(1)},
|
||||||
|
Default(Pop(1)),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
118
vendor/github.com/alecthomas/chroma/lexers/a/ada.go
generated
vendored
Normal file
118
vendor/github.com/alecthomas/chroma/lexers/a/ada.go
generated
vendored
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Ada lexer.
|
||||||
|
var Ada = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "Ada",
|
||||||
|
Aliases: []string{"ada", "ada95", "ada2005"},
|
||||||
|
Filenames: []string{"*.adb", "*.ads", "*.ada"},
|
||||||
|
MimeTypes: []string{"text/x-ada"},
|
||||||
|
CaseInsensitive: true,
|
||||||
|
},
|
||||||
|
adaRules,
|
||||||
|
))
|
||||||
|
|
||||||
|
func adaRules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"root": {
|
||||||
|
{`[^\S\n]+`, Text, nil},
|
||||||
|
{`--.*?\n`, CommentSingle, nil},
|
||||||
|
{`[^\S\n]+`, Text, nil},
|
||||||
|
{`function|procedure|entry`, KeywordDeclaration, Push("subprogram")},
|
||||||
|
{`(subtype|type)(\s+)(\w+)`, ByGroups(KeywordDeclaration, Text, KeywordType), Push("type_def")},
|
||||||
|
{`task|protected`, KeywordDeclaration, nil},
|
||||||
|
{`(subtype)(\s+)`, ByGroups(KeywordDeclaration, Text), nil},
|
||||||
|
{`(end)(\s+)`, ByGroups(KeywordReserved, Text), Push("end")},
|
||||||
|
{`(pragma)(\s+)(\w+)`, ByGroups(KeywordReserved, Text, CommentPreproc), nil},
|
||||||
|
{`(true|false|null)\b`, KeywordConstant, nil},
|
||||||
|
{Words(``, `\b`, `Address`, `Byte`, `Boolean`, `Character`, `Controlled`, `Count`, `Cursor`, `Duration`, `File_Mode`, `File_Type`, `Float`, `Generator`, `Integer`, `Long_Float`, `Long_Integer`, `Long_Long_Float`, `Long_Long_Integer`, `Natural`, `Positive`, `Reference_Type`, `Short_Float`, `Short_Integer`, `Short_Short_Float`, `Short_Short_Integer`, `String`, `Wide_Character`, `Wide_String`), KeywordType, nil},
|
||||||
|
{`(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b`, OperatorWord, nil},
|
||||||
|
{`generic|private`, KeywordDeclaration, nil},
|
||||||
|
{`package`, KeywordDeclaration, Push("package")},
|
||||||
|
{`array\b`, KeywordReserved, Push("array_def")},
|
||||||
|
{`(with|use)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
|
||||||
|
{`(\w+)(\s*)(:)(\s*)(constant)`, ByGroups(NameConstant, Text, Punctuation, Text, KeywordReserved), nil},
|
||||||
|
{`<<\w+>>`, NameLabel, nil},
|
||||||
|
{`(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)`, ByGroups(NameLabel, Text, Punctuation, Text, KeywordReserved), nil},
|
||||||
|
{Words(`\b`, `\b`, `abort`, `abs`, `abstract`, `accept`, `access`, `aliased`, `all`, `array`, `at`, `begin`, `body`, `case`, `constant`, `declare`, `delay`, `delta`, `digits`, `do`, `else`, `elsif`, `end`, `entry`, `exception`, `exit`, `interface`, `for`, `goto`, `if`, `is`, `limited`, `loop`, `new`, `null`, `of`, `or`, `others`, `out`, `overriding`, `pragma`, `protected`, `raise`, `range`, `record`, `renames`, `requeue`, `return`, `reverse`, `select`, `separate`, `subtype`, `synchronized`, `task`, `tagged`, `terminate`, `then`, `type`, `until`, `when`, `while`, `xor`), KeywordReserved, nil},
|
||||||
|
{`"[^"]*"`, LiteralString, nil},
|
||||||
|
Include("attribute"),
|
||||||
|
Include("numbers"),
|
||||||
|
{`'[^']'`, LiteralStringChar, nil},
|
||||||
|
{`(\w+)(\s*|[(,])`, ByGroups(Name, UsingSelf("root")), nil},
|
||||||
|
{`(<>|=>|:=|[()|:;,.'])`, Punctuation, nil},
|
||||||
|
{`[*<>+=/&-]`, Operator, nil},
|
||||||
|
{`\n+`, Text, nil},
|
||||||
|
},
|
||||||
|
"numbers": {
|
||||||
|
{`[0-9_]+#[0-9a-f]+#`, LiteralNumberHex, nil},
|
||||||
|
{`[0-9_]+\.[0-9_]*`, LiteralNumberFloat, nil},
|
||||||
|
{`[0-9_]+`, LiteralNumberInteger, nil},
|
||||||
|
},
|
||||||
|
"attribute": {
|
||||||
|
{`(')(\w+)`, ByGroups(Punctuation, NameAttribute), nil},
|
||||||
|
},
|
||||||
|
"subprogram": {
|
||||||
|
{`\(`, Punctuation, Push("#pop", "formal_part")},
|
||||||
|
{`;`, Punctuation, Pop(1)},
|
||||||
|
{`is\b`, KeywordReserved, Pop(1)},
|
||||||
|
{`"[^"]+"|\w+`, NameFunction, nil},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"end": {
|
||||||
|
{`(if|case|record|loop|select)`, KeywordReserved, nil},
|
||||||
|
{`"[^"]+"|[\w.]+`, NameFunction, nil},
|
||||||
|
{`\s+`, Text, nil},
|
||||||
|
{`;`, Punctuation, Pop(1)},
|
||||||
|
},
|
||||||
|
"type_def": {
|
||||||
|
{`;`, Punctuation, Pop(1)},
|
||||||
|
{`\(`, Punctuation, Push("formal_part")},
|
||||||
|
{`with|and|use`, KeywordReserved, nil},
|
||||||
|
{`array\b`, KeywordReserved, Push("#pop", "array_def")},
|
||||||
|
{`record\b`, KeywordReserved, Push("record_def")},
|
||||||
|
{`(null record)(;)`, ByGroups(KeywordReserved, Punctuation), Pop(1)},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"array_def": {
|
||||||
|
{`;`, Punctuation, Pop(1)},
|
||||||
|
{`(\w+)(\s+)(range)`, ByGroups(KeywordType, Text, KeywordReserved), nil},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"record_def": {
|
||||||
|
{`end record`, KeywordReserved, Pop(1)},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"import": {
|
||||||
|
{`[\w.]+`, NameNamespace, Pop(1)},
|
||||||
|
Default(Pop(1)),
|
||||||
|
},
|
||||||
|
"formal_part": {
|
||||||
|
{`\)`, Punctuation, Pop(1)},
|
||||||
|
{`\w+`, NameVariable, nil},
|
||||||
|
{`,|:[^=]`, Punctuation, nil},
|
||||||
|
{`(in|not|null|out|access)\b`, KeywordReserved, nil},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"package": {
|
||||||
|
{`body`, KeywordDeclaration, nil},
|
||||||
|
{`is\s+new|renames`, KeywordReserved, nil},
|
||||||
|
{`is`, KeywordReserved, Pop(1)},
|
||||||
|
{`;`, Punctuation, Pop(1)},
|
||||||
|
{`\(`, Punctuation, Push("package_instantiation")},
|
||||||
|
{`([\w.]+)`, NameClass, nil},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
"package_instantiation": {
|
||||||
|
{`("[^"]+"|\w+)(\s+)(=>)`, ByGroups(NameVariable, Text, Punctuation), nil},
|
||||||
|
{`[\w.\'"]`, Text, nil},
|
||||||
|
{`\)`, Punctuation, Pop(1)},
|
||||||
|
Include("root"),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
47
vendor/github.com/alecthomas/chroma/lexers/a/al.go
generated
vendored
Normal file
47
vendor/github.com/alecthomas/chroma/lexers/a/al.go
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
package a
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Al lexer.
|
||||||
|
var Al = internal.Register(MustNewLazyLexer(
|
||||||
|
&Config{
|
||||||
|
Name: "AL",
|
||||||
|
Aliases: []string{"al"},
|
||||||
|
Filenames: []string{"*.al", "*.dal"},
|
||||||
|
MimeTypes: []string{"text/x-al"},
|
||||||
|
DotAll: true,
|
||||||
|
CaseInsensitive: true,
|
||||||
|
},
|
||||||
|
alRules,
|
||||||
|
))
|
||||||
|
|
||||||
|
// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage
|
||||||
|
func alRules() Rules {
|
||||||
|
return Rules{
|
||||||
|
"root": {
|
||||||
|
{`\s+`, TextWhitespace, nil},
|
||||||
|
{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
|
||||||
|
{`(?s)//.*?\n`, CommentSingle, nil},
|
||||||
|
{`\"([^\"])*\"`, Text, nil},
|
||||||
|
{`'([^'])*'`, LiteralString, nil},
|
||||||
|
{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
|
||||||
|
{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
|
||||||
|
{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
|
||||||
|
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil},
|
||||||
|
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
|
||||||
|
{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
|
||||||
|
{`\b(\-|\+|\/|\*)\b`, Operator, nil},
|
||||||
|
{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
|
||||||
|
{`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
|
||||||
|
{`\s*[(\.\.)&\|]\s*`, Operator, nil},
|
||||||
|
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
|
||||||
|
{`[;:,]`, Punctuation, nil},
|
||||||
|
{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
|
||||||
|
{`\w+`, Text, nil},
|
||||||
|
{`.`, Text, nil},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user