From 942f92c477904a0cf832371c4d0a03b89aa9b5cf Mon Sep 17 00:00:00 2001 From: Sebastian Frank Date: Wed, 15 Nov 2023 07:00:12 +0000 Subject: [PATCH] yarn package upgrades, ssr update --- Makefile | 17 +- api/collections/medialib.yml | 6 +- api/hooks/config-client.js | 2 + api/hooks/config.js | 30 +- api/hooks/lib/ssr-server.js | 65 + api/hooks/lib/ssr.js | 169 +++ api/hooks/lib/utils.js | 33 - api/hooks/ssr/get_read.js | 129 +- docker-compose-local.yml | 10 +- esbuild.config.js | 2 +- esbuild.config.server.js | 2 +- frontend/.htaccess | 14 + frontend/src/config.ts | 7 + frontend/src/sentry.ts | 42 + package.json | 49 +- tsconfig.json | 3 +- types/global.d.ts | 27 + yarn.lock | 2585 ++++++++++++++++++---------------- 18 files changed, 1800 insertions(+), 1392 deletions(-) create mode 100644 api/hooks/lib/ssr-server.js create mode 100644 api/hooks/lib/ssr.js create mode 100644 frontend/src/sentry.ts create mode 100644 types/global.d.ts diff --git a/Makefile b/Makefile index 5dbc52a..c4b2ddd 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,8 @@ DOCKER_COMPOSE=docker compose -f docker-compose-local.yml -DOCKER_ALL_PROFILES=--profile docpress --profile tibi-dev --profile tibi --profile chisel .DEFAULT_GOAL := help -.PHONY: docker-up docker-up-tibi-dev docker-up-chisel docker-up-docpress docker-start docker-start-tibi-dev docker-down docker-ps docker-logs docker-pull yarn-upgrade fix-permissions +.PHONY: docker-up docker-up-tibi-dev docker-start docker-start-tibi-dev docker-down docker-ps docker-logs yarn-upgrade fix-permissions include ./.env @@ -21,25 +20,25 @@ docker-up-chisel: ## bring up chisel tunnel $(DOCKER_COMPOSE) --profile chisel up -d docker-down: ## take docker compose stack down - $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) down + $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel down docker-start: ## start docker compose stack in foreground and take it down after CTRL-C - $(DOCKER_COMPOSE) --profile tibi up; $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) down + $(DOCKER_COMPOSE) --profile tibi up; $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel down docker-start-tibi-dev: ## start docker compose stack in foreground and take it down after CTRL-C (with tibi-dev) - $(DOCKER_COMPOSE) --profile tibi-dev up; $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) down + $(DOCKER_COMPOSE) --profile tibi-dev up; $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel down docker-ps: ## show container state - $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) ps + $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel ps docker-logs: ## show docker logs and follow - $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) logs -f || true + $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel logs -f --tail=100 || true docker-pull: ## pull docker images - $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) pull + $(DOCKER_COMPOSE) --profile tibi-dev --profile tibi --profile chisel pull docker-%: - $(DOCKER_COMPOSE) $(DOCKER_ALL_PROFILES) $* + $(DOCKER_COMPOSE) $* yarn-upgrade: # interactive yarn upgrade $(DOCKER_COMPOSE) run --rm yarnstart yarn upgrade-interactive diff --git a/api/collections/medialib.yml b/api/collections/medialib.yml index cef4a41..e4ea372 100644 --- a/api/collections/medialib.yml +++ b/api/collections/medialib.yml @@ -17,9 +17,6 @@ meta: # Die Bildgröße für die Einbindung ins erzeugte HTML des ContentBuilder # hat hiermit nix zu tun. 
defaultImageFilter: s
-    multiupload:
-        fields:
-            - source: description
     multiupload:
         fields:
@@ -32,7 +29,6 @@ meta:
                     return "Title" + $file.name
                 })()
 
-
     # Wird unter "image-/file-/videoSelect" im ContentBuilder Feld kein
     # "subNavigation" Index definiert, werden auch folgende "views"
     # verwendet.
@@ -47,7 +43,7 @@ meta:
            secondaryText:
                source: title
                filter: true
-
+
            tertiaryText:
                source: description
                filter: true
diff --git a/api/hooks/config-client.js b/api/hooks/config-client.js
index 0736b02..b618228 100644
--- a/api/hooks/config-client.js
+++ b/api/hooks/config-client.js
@@ -1,4 +1,5 @@
 const release = "tibi-docs.dirty"
+const apiClientBaseURL = "/api/"
 
 // @ts-ignore
 if (release && typeof context !== "undefined") {
@@ -7,4 +8,5 @@ if (release && typeof context !== "undefined") {
 
 module.exports = {
     release,
+    apiClientBaseURL,
 }
diff --git a/api/hooks/config.js b/api/hooks/config.js
index 50301c9..6504b80 100644
--- a/api/hooks/config.js
+++ b/api/hooks/config.js
@@ -1,19 +1,35 @@
+const publishedFilter = {
+    $or: [
+        { publishDate: { $exists: false } },
+        { publishDate: null },
+        {
+            publishDate: { $lte: { $date: new Date().toISOString() } },
+            // publishDate: { $lte: new Date() },
+        },
+    ],
+}
+
+const apiSsrBaseURL = "http://localhost:8080/api/v1/_/demo/"
+
 module.exports = {
-    ssrValidatePath: function (path) {
+    publishedFilter,
+    apiSsrBaseURL,
+    ssrValidatePath: function (/** @type {string} */ path) {
         // validate if path ssr rendering is ok, -1 = NOTFOUND, 0 = NO SSR, 1 = SSR
         // pe. use context.readCollection("product", {filter: {path: path}}) ... to validate dynamic urls

-        // / is de home
-        if (path == "/") return 1
+        // // / is the home
+        // if (path == "/") return 1

-        // all other sites are in db
-        path = path?.replace(/^\//, "")
+        // // all other sites are in db
+        // path = path?.replace(/^\//, "")

         // filter for path or alternativePaths
         const resp = context.db.find("content", {
             filter: {
-                $or: [{ path }, { "alternativePaths.path": path }],
+                $and: [{ $or: [{ path }, { "alternativePaths.path": path }] }, publishedFilter],
             },
+            selector: { _id: 1 },
         })

         if (resp && resp.length) {
@@ -23,5 +39,5 @@ module.exports = {
         // not found
         return -1
     },
-    ssrAllowedAPIEndpoints: ["content", "medialib"],
+    ssrPublishCheckCollections: ["content"],
 }
diff --git a/api/hooks/lib/ssr-server.js b/api/hooks/lib/ssr-server.js
new file mode 100644
index 0000000..c0a4557
--- /dev/null
+++ b/api/hooks/lib/ssr-server.js
@@ -0,0 +1,65 @@
+const { apiSsrBaseURL, ssrPublishCheckCollections } = require("../config")
+
+/**
+ * api request via server, cache result in context.ssrCache
+ * should be eliminated in client code via tree shaking
+ *
+ * @param {string} cacheKey
+ * @param {string} endpoint
+ * @param {string} query
+ * @param {ApiOptions} options
+ * @returns {ApiResult}
+ */
+function ssrRequest(cacheKey, endpoint, query, options) {
+    let url = endpoint + (query ? "?" + query : "")
+
+    if (ssrPublishCheckCollections.includes(endpoint)) {
+        // @ts-ignore
+        let validUntil = context.ssrCacheValidUntil
+
+        // check in db for publish date to invalidate cache
+        const _optionsPublishSearch = Object.assign(
+            {},
+            { filter: options?.filter },
+            {
+                selector: { publishDate: 1 },
+                // projection: null,
+            }
+        )
+        const publishSearch = context.db.find(endpoint, _optionsPublishSearch)
+        publishSearch?.forEach((item) => {
+            const publishDate = item.publishDate ? 
new Date(item.publishDate.unixMilli()) : null + + if (publishDate && publishDate > new Date()) { + // entry has a publish date in the future, set global validUntil + if (validUntil == null || validUntil > publishDate) { + validUntil = publishDate + } + } + }) + // @ts-ignore + context.ssrCacheValidUntil = validUntil + } + + // console.log("############ FETCHING ", apiSsrBaseURL + url) + + const response = context.http.fetch(apiSsrBaseURL + url, { + method: options.method, + headers: options.headers, + }) + + const json = response.body.json() + const count = parseInt(response.headers["x-results-count"] || "0") + + // json is go data structure and incompatible with js, so we need to convert it + const r = { data: JSON.parse(JSON.stringify(json)), count: count } + + // @ts-ignore + context.ssrCache[cacheKey] = r + + return r +} + +module.exports = { + ssrRequest, +} diff --git a/api/hooks/lib/ssr.js b/api/hooks/lib/ssr.js new file mode 100644 index 0000000..d0432f6 --- /dev/null +++ b/api/hooks/lib/ssr.js @@ -0,0 +1,169 @@ +const { apiClientBaseURL } = require("../config-client") + +/** + * convert object to string + * @param {any} obj object + */ +function obj2str(obj) { + if (Array.isArray(obj)) { + return JSON.stringify( + obj.map(function (idx) { + return obj2str(idx) + }) + ) + } else if (typeof obj === "object" && obj !== null) { + var elements = Object.keys(obj) + .sort() + .map(function (key) { + var val = obj2str(obj[key]) + if (val) { + return key + ":" + val + } + }) + + var elementsCleaned = [] + for (var i = 0; i < elements.length; i++) { + if (elements[i]) elementsCleaned.push(elements[i]) + } + + return "{" + elementsCleaned.join("|") + "}" + } + + if (obj) return obj +} + +// fetch polyfill +// [MIT License](LICENSE.md) © [Jason Miller](https://jasonformat.com/) +const _f = function (url, options) { + if (typeof XMLHttpRequest === "undefined") { + return Promise.resolve(null) + } + + options = options || {} + return new Promise((resolve, reject) => { + const request = new XMLHttpRequest() + const keys = [] + const all = [] + const headers = {} + + const response = () => ({ + ok: ((request.status / 100) | 0) == 2, // 200-299 + statusText: request.statusText, + status: request.status, + url: request.responseURL, + text: () => Promise.resolve(request.responseText), + json: () => Promise.resolve(request.responseText).then(JSON.parse), + blob: () => Promise.resolve(new Blob([request.response])), + clone: response, + headers: { + // @ts-ignore + keys: () => keys, + // @ts-ignore + entries: () => all, + get: (n) => headers[n.toLowerCase()], + has: (n) => n.toLowerCase() in headers, + }, + }) + + request.open(options.method || "get", url, true) + + request.onload = () => { + request + .getAllResponseHeaders() + // @ts-ignore + .replace(/^(.*?):[^\S\n]*([\s\S]*?)$/gm, (m, key, value) => { + keys.push((key = key.toLowerCase())) + all.push([key, value]) + headers[key] = headers[key] ? `${headers[key]},${value}` : value + }) + resolve(response()) + } + + request.onerror = reject + + request.withCredentials = options.credentials == "include" + + for (const i in options.headers) { + request.setRequestHeader(i, options.headers[i]) + } + + request.send(options.body || null) + }) +} + +const _fetch = typeof fetch === "undefined" ? (typeof window === "undefined" ? 
_f : window.fetch || _f) : fetch
+
+/**
+ * api request via client or server
+ * server function ssrRequest is called via context.ssrRequest, bound in ssr hook
+ *
+ * @param {string} endpoint
+ * @param {ApiOptions} options
+ * @returns {Promise}
+ */
+function apiRequest(endpoint, options) {
+    // first check cache if on client
+    const cacheKey = obj2str({ endpoint: endpoint, options: options })
+
+    // @ts-ignore
+    if (typeof window !== "undefined" && window.__SSR_CACHE__) {
+        // @ts-ignore
+        const cache = window.__SSR_CACHE__[cacheKey]
+        console.log("SSR:", cacheKey, cache)
+        if (cache) {
+            return Promise.resolve(cache)
+        }
+    }
+
+    let method = "GET"
+
+    let query = "&count=1"
+    if (options?.filter) query += "&filter=" + encodeURIComponent(JSON.stringify(options.filter))
+    if (options?.sort) query += "&sort=" + options.sort + "&sort=_id"
+    if (options?.limit) query += "&limit=" + options.limit
+    if (options?.offset) query += "&offset=" + options.offset
+    if (options?.projection) query += "&projection=" + options.projection
+    if (options?.lookup) query += "&lookup=" + options.lookup
+
+    if (options?.params) {
+        Object.keys(options.params).forEach((p) => {
+            query += "&" + p + "=" + encodeURIComponent(options.params[p])
+        })
+    }
+
+    let headers = {
+        "Content-Type": "application/json",
+    }
+
+    if (options?.headers) headers = { ...headers, ...options.headers }
+
+    if (typeof window === "undefined") {
+        // server
+
+        // reference via context from get hook to tree shake in client
+        // @ts-ignore
+        const d = context.ssrRequest(cacheKey, endpoint, query, Object.assign({}, options, { method, headers }))
+        return d
+    } else {
+        // client
+        let url = endpoint + (query ? "?" + query : "")
+        console.log("fetch", apiClientBaseURL + url)
+        return _fetch(apiClientBaseURL + url, {
+            method,
+            mode: "cors",
+            headers,
+        }).then((response) => {
+            return response?.json().then((json) => {
+                if (response?.status < 200 || response?.status >= 400) {
+                    return Promise.reject({ response, data: json })
+                }
+                return Promise.resolve({ data: json || null, count: response.headers?.get("x-results-count") || 0 })
+            })
+        })
+    }
+}
+
+module.exports = {
+    obj2str,
+    apiRequest,
+}
diff --git a/api/hooks/lib/utils.js b/api/hooks/lib/utils.js
index 922ef24..7ea6899 100644
--- a/api/hooks/lib/utils.js
+++ b/api/hooks/lib/utils.js
@@ -6,38 +6,6 @@ function log(str) {
     console.log(JSON.stringify(str, undefined, 4))
 }
 
-/**
- * convert object to string
- * @param {any} obj object
- */
-function obj2str(obj) {
-    if (Array.isArray(obj)) {
-        return JSON.stringify(
-            obj.map(function (idx) {
-                return obj2str(idx)
-            })
-        )
-    } else if (typeof obj === "object" && obj !== null) {
-        var elements = Object.keys(obj)
-            .sort()
-            .map(function (key) {
-                var val = obj2str(obj[key])
-                if (val) {
-                    return key + ":" + val
-                }
-            })
-
-        var elementsCleaned = []
-        for (var i = 0; i < elements.length; i++) {
-            if (elements[i]) elementsCleaned.push(elements[i])
-        }
-
-        return "{" + elementsCleaned.join("|") + "}"
-    }
-
-    if (obj) return obj
-}
-
 /**
  * clear SSR cache
  */
@@ -49,5 +17,4 @@ function clearSSRCache() {
 module.exports = {
     log,
     clearSSRCache,
-    obj2str,
 }
diff --git a/api/hooks/ssr/get_read.js b/api/hooks/ssr/get_read.js
index 90bff2e..116b3fb 100644
--- a/api/hooks/ssr/get_read.js
+++ b/api/hooks/ssr/get_read.js
@@ -1,16 +1,23 @@
-const { ssrValidatePath, ssrAllowedAPIEndpoints } = require("../config")
+// TODO: add query string functionality to cache
+
+const { ssrValidatePath } = require("../config")
+const { log } = 
require("../lib/utils") +const { ssrRequest } = require("../lib/ssr-server") -const { obj2str, log } = require("../lib/utils") ;(function () { /** @type {HookResponse} */ - var response = null + // @ts-ignore + let response = null - var request = context.request() - var url = request.query("url") - var noCache = request.query("noCache") + const request = context.request() + let url = request.query("url") + const noCache = request.query("noCache") // add sentry trace id to head - var trace_id = context.debug.sentryTraceId() + const trace_id = context.debug.sentryTraceId() + /** + * @param {string} content + */ function addSentryTrace(content) { return content.replace("", '') } @@ -18,7 +25,9 @@ const { obj2str, log } = require("../lib/utils") if (url) { // comment will be printed to html later - var comment = "" + let comment = "" + /** @type {Date} */ // @ts-ignore + context.ssrCacheValidUntil = null url = url.split("?")[0] comment += "url: " + url @@ -31,7 +40,8 @@ const { obj2str, log } = require("../lib/utils") } // check if url is in cache - var cache = + /** @type {Ssr[]} */ // @ts-ignore + const cache = !noCache && context.db.find("ssr", { filter: { @@ -39,93 +49,70 @@ const { obj2str, log } = require("../lib/utils") }, }) if (cache && cache.length) { - // use cache - throw { - status: 200, - log: false, - html: addSentryTrace(cache[0].content), + const validUntil = cache[0].validUntil ? new Date(cache[0].validUntil.unixMilli()) : null + // context.debug.dump("cache validUntil", validUntil) + if (!validUntil || validUntil > new Date()) { + // context.debug.dump("using cache") + // use cache + context.response.header("X-SSR-Cache", "true") + throw { + status: 200, + log: false, + html: addSentryTrace(cache[0].content), + } + } else { + // cache is invalid, delete it + context.response.header("X-SSR-Cache", "invalid") + // @ts-ignore + context.db.delete("ssr", cache[0].id) } } // validate url - var status = 200 + let status = 200 - var pNorender = false - var pNotfound = false + let pNorender = false + let pNotfound = false - var pR = ssrValidatePath(url) + const pR = ssrValidatePath(url) if (pR < 0) { pNotfound = true } else if (!pR) { pNorender = true } - var head = "" - var html = "" - var error = "" + let head = "" + let html = "" + let error = "" comment += ", path: " + url - var cacheIt = false + let cacheIt = false if (pNorender) { html = "" } else if (pNotfound) { status = 404 html = "404 NOT FOUND" } else { + // @ts-ignore + context.ssrCache = {} + // @ts-ignore + context.ssrRequest = ssrRequest + // try rendering, if error output plain html try { - // @ts-ignore - context.ssrCache = {} - // @ts-ignore - context.ssrFetch = function (endpoint, options) { - var data - if (ssrAllowedAPIEndpoints.indexOf(endpoint) > -1) { - var _options = Object.assign({}, options) - - if (_options.sort) _options.sort = [_options.sort] - - try { - /*console.log( - "SSR", - endpoint, - JSON.stringify(_options) - )*/ - var goSlice = context.db.find(endpoint, _options || {}) - // need to deep copy, so shift and delete on pure js is possible - data = JSON.parse(JSON.stringify(goSlice)) - } catch (e) { - console.log("ERROR", JSON.stringify(e)) - data = [] - } - } else { - console.log("SSR forbidden", endpoint) - data = [] - } - - var count = (data && data.length) || 0 - if (options && count == options.limit) { - // read count from db - count = context.db.count(endpoint, _options || {}) - } - var r = { data: data, count: count } - - // @ts-ignore - context.ssrCache[obj2str({ endpoint: endpoint, options: 
options })] = r - - return r - } - // include App.svelte and render it // @ts-ignore - var app = require("../lib/app.server") - var rendered = app.default.render({ + + // console.log("####### RENDERING ", url) + const app = require("../lib/app.server") + const rendered = app.default.render({ url: url, }) head = rendered.head html = rendered.html - // add ssrCache to head + // add ssrCache to head, cache is built in ssr.js/apiRequest head += "\n\n" + "