mark2web/pkg/webrequest/request.go

package webrequest
import (
	"encoding/json"
	"fmt"
	"image"
	"io/ioutil"
	"net/http"
	"strings"

	"gitbase.de/apairon/mark2web/pkg/logger"
	"gitbase.de/apairon/mark2web/pkg/progress"
)

// wrImageEntry holds a decoded image together with its format name for the
// per-URL image cache.
type wrImageEntry struct {
	img    image.Image
	format string
}

// wrJSONEntry holds a decoded JSON document for the per-URL JSON cache.
type wrJSONEntry struct {
	data interface{}
}

var wrImageCache = make(map[string]*wrImageEntry)
var wrJSONCache = make(map[string]*wrJSONEntry)

// Get will fetch a URL via HTTP GET and return the response. The opts
// parameter is currently unused.
func Get(url string, opts interface{}) (resp *http.Response, err error) {
	logger.N("requesting url via GET %s", url)

	// report the request to the progress display
	progress.IncrTotal("web request")
	progress.DescribeCurrent("web request", url)
	resp, err = http.Get(url)
	progress.IncrDone("web request")
	return resp, err
}
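
// A minimal usage sketch for Get (hypothetical caller code, not part of this
// package; the URL is an assumption for illustration):
//
//	resp, err := webrequest.Get("https://api.example.com/items.json", nil)
//	if err == nil {
//		defer resp.Body.Close()
//		// read resp.Body here
//	}
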
// GetJSON will GET a JSON object or array from a given URL. Decoded results
// are cached per URL.
func GetJSON(url string) interface{} {
	cached := wrJSONCache[url]
	if cached == nil {
		resp, err := Get(url, nil)
		logger.Eexit(err, "could not get url '%s'", url)
		defer resp.Body.Close()

		body, err := ioutil.ReadAll(resp.Body)
		logger.Eexit(err, "could not read body from url '%s'", url)

		logger.D("output from url '%s':\n%s", url, string(body))

		if resp.StatusCode >= 400 {
			logger.Exit("bad status '%d - %s' from url '%s'", resp.StatusCode, resp.Status, url)
		}

		contentType := resp.Header.Get("Content-Type")
		if !strings.Contains(contentType, "json") {
			logger.Exit("content type '%s' from url '%s' is not json", contentType, url)
		}

		// decode as a JSON object first, then fall back to an array of objects
		cached = new(wrJSONEntry)
		jsonMap := make(map[string]interface{})
		err = json.Unmarshal(body, &jsonMap)
		if err == nil {
			cached.data = jsonMap
		} else {
			jsonArrayMap := make([]map[string]interface{}, 0)
			err = json.Unmarshal(body, &jsonArrayMap)
			if err == nil {
				cached.data = jsonArrayMap
			} else {
				logger.Exit("could not read json from '%s': invalid type", url)
			}
		}
		wrJSONCache[url] = cached
	}
	return cached.data
}
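
// A minimal usage sketch for GetJSON (hypothetical caller code; the URL and
// the "title" key are assumptions for illustration). The result is either a
// map[string]interface{} or a []map[string]interface{}, so callers typically
// type-switch on it:
//
//	switch data := webrequest.GetJSON("https://api.example.com/items.json").(type) {
//	case map[string]interface{}:
//		// single JSON object
//		_ = data["title"]
//	case []map[string]interface{}:
//		// JSON array of objects
//		for _, item := range data {
//			_ = item["title"]
//		}
//	}
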
// GetImage gets and decodes an image from a URL. Decoded images are cached
// per URL.
func GetImage(url string) (image.Image, string, error) {
	cached := wrImageCache[url]
	if cached == nil {
		resp, err := Get(url, nil)
		if err != nil {
			return nil, "", fmt.Errorf("could not get url '%s': %s", url, err)
		}
		defer resp.Body.Close()
		img, format, err := image.Decode(resp.Body)
		if err != nil {
			return nil, "", fmt.Errorf("could not decode image from url '%s': %s", url, err)
		}
		cached = &wrImageEntry{
			img:    img,
			format: format,
		}

		wrImageCache[url] = cached
	}
	return cached.img, cached.format, nil
}
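
// A minimal usage sketch for GetImage (hypothetical caller code; the URL is an
// assumption for illustration). Note that image.Decode only understands
// formats whose decoders are registered somewhere in the program, e.g. via
// blank imports such as _ "image/png" or _ "image/jpeg":
//
//	img, format, err := webrequest.GetImage("https://example.com/logo.png")
//	if err == nil {
//		fmt.Printf("decoded %s image with bounds %v\n", format, img.Bounds())
//	}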