cljs worker

This commit is contained in:
Tienson Qin
2025-12-27 15:57:31 +08:00
parent 2a96243474
commit 58cda5538f
17 changed files with 794 additions and 634 deletions

1
.gitignore vendored
View File

@@ -68,6 +68,7 @@ deps/shui/.lsp
deps/shui/.lsp-cache
deps/shui/.clj-kondo
deps/publish/worker/.wrangler
deps/publish/worker/dist
tx-log*
clj-e2e/.wally

View File

@@ -1,4 +1,4 @@
{:paths ["src/main" "src/electron" "src/resources"]
{:paths ["src/publish-ssr" "src/main" "src/electron" "src/resources"]
:deps
{org.clojure/clojure {:mvn/version "1.11.1"}
rum/rum {:git/url "https://github.com/logseq/rum" ;; fork

View File

@@ -1,24 +0,0 @@
(ns logseq.publish
  "Public entrypoint for page publishing shared logic.
  Thin facade over the snapshot/ssr/storage namespaces so callers only
  depend on one namespace."
  (:require [logseq.publish.snapshot :as snapshot]
            [logseq.publish.ssr :as ssr]
            [logseq.publish.storage :as storage]))

(defn normalize-snapshot
  "Public wrapper around snapshot normalization.
  Delegates to `snapshot/normalize-snapshot`."
  [snapshot-map]
  (snapshot/normalize-snapshot snapshot-map))

(defn snapshot-valid?
  "Checks required keys in the snapshot.
  Delegates to `snapshot/snapshot-valid?`."
  [snapshot-map]
  (snapshot/snapshot-valid? snapshot-map))

(defn render-page-html
  "Render HTML for a published page.
  Delegates to `ssr/render-page-html`; see that fn for `opts`."
  [snapshot-map opts]
  (ssr/render-page-html snapshot-map opts))

;; Re-exported storage protocol so implementers need not require
;; logseq.publish.storage directly.
(def PublishStore storage/PublishStore)

View File

@@ -0,0 +1,11 @@
(ns logseq.publish.async
  "Async helper macros for ClojureScript workers."
  (:require [shadow.cljs.modern]))

(defmacro js-await
  "Like `let` but for async values, executed sequentially.
  Non-async values are wrapped in `js/Promise.resolve`.
  Expands to nested `shadow.cljs.modern/js-await` forms, one per
  binding pair, so each binding can await the previous one."
  [[a b & bindings] & body]
  ;; Wrap every bound expression so plain values and promises are
  ;; handled uniformly by the underlying shadow macro.
  (let [b `(~'js/Promise.resolve ~b)]
    (if (seq bindings)
      ;; Recurse on the remaining pairs; `bindings` is a seq, which the
      ;; recursive call destructures the same way a vector would be.
      `(shadow.cljs.modern/js-await ~[a b] (js-await ~bindings ~@body))
      `(shadow.cljs.modern/js-await ~[a b] ~@body))))

View File

@@ -1,29 +0,0 @@
(ns logseq.publish.snapshot
  "Utilities for shaping page publishing snapshot payloads.")

(def required-keys
  "Keys every publish snapshot must contain."
  #{:page :blocks :linked-refs :config})

(defn normalize-snapshot
  "Ensures the snapshot contains the minimum required keys, filling
  defaults for missing or nil values.
  Expected shape:
  {:page        <page entity map>
   :blocks      <block tree or flat list>
   :linked-refs <linked references payload>
   :config      <publishing config map>
   :assets      <optional asset map>}
  Fix: the previous `(merge defaults (select-keys snapshot required-keys))`
  let a present-but-nil :blocks/:linked-refs/:config entry override its
  default; defaults now always win over nil."
  [{:keys [page blocks linked-refs config assets]}]
  {:page page
   :blocks (or blocks [])
   :linked-refs (or linked-refs [])
   :config (or config {})
   :assets assets})

(defn snapshot-valid?
  "True when all `required-keys` are present in `snapshot`."
  [snapshot]
  (every? #(contains? snapshot %) required-keys))

View File

@@ -1,15 +0,0 @@
(ns logseq.publish.ssr
  "SSR helpers for published pages.")

(defn render-page-html
  "Renders HTML for a published page.
  Options:
  - :render-page-fn — returns the HTML body string for `snapshot`.
  - :wrap-html-fn — wraps the rendered body with document-level markup.
  Without :render-page-fn the body is nil; without :wrap-html-fn the
  bare body is returned."
  [snapshot {:keys [render-page-fn wrap-html-fn]}]
  (let [body (when render-page-fn (render-page-fn snapshot))
        wrap (or wrap-html-fn identity)]
    (wrap body)))

View File

@@ -1,9 +0,0 @@
(ns logseq.publish.storage
  "Contracts for durable storage backends.")

(defprotocol PublishStore
  "Storage for published page snapshots. Implementations should use SQLite as
  the durable store and run the Logseq datascript fork on top of it."
  (put-snapshot! [this page-id snapshot]
    "Persists `snapshot` under `page-id`, replacing any previous one.")
  (get-snapshot [this page-id]
    "Returns the stored snapshot for `page-id`, or nil when absent.")
  (delete-snapshot! [this page-id]
    "Removes the stored snapshot for `page-id`."))

View File

@@ -0,0 +1,601 @@
(ns logseq.publish.worker
(:require ["cloudflare:workers" :refer [DurableObject]]
[clojure.string :as string]
[logseq.db :as ldb]
[shadow.cljs.modern :refer (defclass)])
(:require-macros [logseq.publish.async :refer [js-await]]))
;; Shared UTF-8 codecs, created once per isolate.
(def text-decoder (js/TextDecoder.))
(def text-encoder (js/TextEncoder.))
(defn cors-headers
  "Permissive CORS headers applied to every response; `etag` is exposed
  so browsers can do conditional requests."
  []
  #js {"access-control-allow-origin" "*"
       "access-control-allow-methods" "GET,POST,OPTIONS"
       "access-control-allow-headers" "content-type,authorization,x-publish-meta,if-none-match"
       "access-control-expose-headers" "etag"})
(defn merge-headers
  "Returns a new js/Headers built from `base` with every entry of the JS
  object `extra` set on top (extra's keys overwrite base's)."
  [base extra]
  (let [headers (js/Headers. base)]
    (doseq [[k v] (js/Object.entries extra)]
      (.set headers k v))
    headers))
(defn json-response
  "Serializes `data` (via clj->js) into a js/Response with the JSON
  content-type plus CORS headers. Status defaults to 200."
  ([data] (json-response data 200))
  ([data status]
   (js/Response.
    (js/JSON.stringify (clj->js data))
    #js {:status status
         :headers (merge-headers
                   #js {"content-type" "application/json"}
                   (cors-headers))})))
(defn unauthorized
  "401 JSON error response."
  []
  (json-response {:error "unauthorized"} 401))

(defn bad-request
  "400 JSON error response carrying `message`."
  [message]
  (json-response {:error message} 400))

(defn not-found
  "404 JSON error response."
  []
  (json-response {:error "not found"} 404))
(defn parse-meta-header
  "Reads and JSON-parses the x-publish-meta request header. Returns the
  parsed JS object, or nil when the header is absent or invalid JSON."
  [request]
  (let [meta-header (.get (.-headers request) "x-publish-meta")]
    (when meta-header
      (try
        (js/JSON.parse meta-header)
        (catch :default _
          nil)))))
(defn meta-from-body
  "Fallback metadata source: decodes `buffer` (ArrayBuffer) as a transit
  payload and returns its :publish/meta entry as a JS object, or nil on
  any decode/parse failure."
  [buffer]
  (try
    (let [payload (ldb/read-transit-str (.decode text-decoder buffer))
          meta (:publish/meta payload)]
      (when meta
        (clj->js meta)))
    (catch :default _
      nil)))
(defn valid-meta?
  "Truthy when `meta` (a JS object) carries the three required publish
  fields: content hash, graph id and page uuid.
  NOTE(review): `aget` on string keys works but is intended for arrays;
  `goog.object/get` would be the conventional accessor."
  [meta]
  (and meta
       (aget meta "publish/content-hash")
       (aget meta "publish/graph")
       (aget meta "page-uuid")))
(defn get-sql-rows
  "Normalizes a Cloudflare SQL exec result into an array/vector of rows,
  handling cursor objects (.toArray), iterables, and {results|rows}
  wrapper shapes. Returns [] for nil or unknown shapes."
  [^js result]
  (let [iter-fn (when result (aget result js/Symbol.iterator))]
    (cond
      (nil? result) []
      (fn? (.-toArray result)) (.toArray result)
      ;; Iterable — covers SqlStorage cursors and plain JS arrays.
      (fn? iter-fn) (vec (js/Array.from result))
      (array? (.-results result)) (.-results result)
      (array? (.-rows result)) (.-rows result)
      ;; NOTE(review): plain arrays are iterable, so the iter-fn branch
      ;; above already catches them and this branch looks unreachable.
      (array? result) (if (empty? result)
                        []
                        (let [first-row (first result)]
                          (cond
                            (array? (.-results first-row)) (.-results first-row)
                            (array? (.-rows first-row)) (.-rows first-row)
                            :else result)))
      :else [])))
(defn sql-exec
  "Invokes sql.exec(sql-str, ...args) with `sql` as `this`, since the
  Cloudflare SqlStorage exec API is variadic."
  [sql sql-str & args]
  (.apply (.-exec sql) sql (to-array (cons sql-str args))))
(defn to-hex
  "Hex-encodes `buffer` (an ArrayBuffer or typed-array view) as a string
  of two lowercase hex digits per byte."
  [buffer]
  (reduce (fn [acc byte]
            (str acc (.padStart (.toString byte 16) 2 "0")))
          ""
          (array-seq (js/Uint8Array. buffer))))
(defn sha256-hex
  "SHA-256 of the UTF-8 bytes of `message`, as a lowercase hex string.
  Returns a promise."
  [message]
  (js-await [data (.encode text-encoder message)
             digest (.digest js/crypto.subtle "SHA-256" data)]
    (to-hex digest)))

(defn hmac-sha256
  "HMAC-SHA-256 of `message` (a BufferSource) keyed by the raw bytes of
  `key`. Returns a promise of the signature ArrayBuffer."
  [key message]
  (js-await [crypto-key (.importKey js/crypto.subtle
                                    "raw"
                                    key
                                    #js {:name "HMAC" :hash "SHA-256"}
                                    false
                                    #js ["sign"])]
    (.sign js/crypto.subtle "HMAC" crypto-key message)))
(defn encode-rfc3986
  "Percent-encodes `value` per RFC 3986, additionally escaping the
  characters !'()* that js/encodeURIComponent leaves alone (required for
  AWS SigV4 canonical query strings).
  Fix: JS `.replace` with a CLJS regex literal has no global flag, so
  only the FIRST matching character was escaped; clojure.string/replace
  replaces all matches."
  [value]
  (string/replace (js/encodeURIComponent value)
                  #"[!'()*]"
                  (fn [c]
                    (str "%"
                         (.toUpperCase (.toString (.charCodeAt c 0) 16))))))
(defn encode-path
  "RFC 3986-encodes each \"/\"-separated segment of `path`, preserving
  the separators."
  [path]
  (->> (string/split path #"/")
       (map encode-rfc3986)
       (string/join "/")))
(defn get-signature-key
  "Derives the AWS SigV4 signing key by chaining HMACs over the date
  stamp, region, service and the literal \"aws4_request\". Returns a
  promise of the key bytes."
  [secret date-stamp region service]
  (js-await [k-date (hmac-sha256
                     (.encode text-encoder (str "AWS4" secret))
                     (.encode text-encoder date-stamp))
             k-region (hmac-sha256 k-date (.encode text-encoder region))
             k-service (hmac-sha256 k-region (.encode text-encoder service))]
    (hmac-sha256 k-service (.encode text-encoder "aws4_request"))))
(defn presign-r2-url
  "Builds an AWS SigV4 query-presigned GET URL (300s expiry) for
  `r2-key` in the R2 bucket configured via env (R2_ACCOUNT_ID,
  R2_BUCKET, R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY). Returns a promise
  of the URL string.
  Fix: amz-date was built with JS `.replace`, whose CLJS regex literal
  lacks the global flag and therefore stripped only the FIRST ':' or
  '-' from the ISO timestamp, producing an invalid SigV4 date;
  clojure.string/replace replaces all occurrences."
  [r2-key env]
  (js-await [region "auto"
             service "s3"
             host (str (aget env "R2_ACCOUNT_ID") ".r2.cloudflarestorage.com")
             bucket (aget env "R2_BUCKET")
             method "GET"
             now (js/Date.)
             ;; e.g. "2025-12-27T07:57:31.123Z" -> "20251227T075731Z"
             amz-date (string/replace (.toISOString now) #"[:-]|\.\d{3}" "")
             date-stamp (.slice amz-date 0 8)
             credential-scope (str date-stamp "/" region "/" service "/aws4_request")
             params (->> [["X-Amz-Algorithm" "AWS4-HMAC-SHA256"]
                          ["X-Amz-Credential" (str (aget env "R2_ACCESS_KEY_ID") "/" credential-scope)]
                          ["X-Amz-Date" amz-date]
                          ["X-Amz-Expires" "300"]
                          ["X-Amz-SignedHeaders" "host"]]
                         (sort-by first))
             canonical-query (->> params
                                  (map (fn [[k v]]
                                         (str (encode-rfc3986 k) "=" (encode-rfc3986 v))))
                                  (string/join "&"))
             canonical-uri (str "/" bucket "/" (encode-path r2-key))
             canonical-headers (str "host:" host "\n")
             signed-headers "host"
             payload-hash "UNSIGNED-PAYLOAD"
             canonical-request (string/join "\n"
                                            [method
                                             canonical-uri
                                             canonical-query
                                             canonical-headers
                                             signed-headers
                                             payload-hash])
             canonical-hash (sha256-hex canonical-request)
             string-to-sign (string/join "\n"
                                         ["AWS4-HMAC-SHA256"
                                          amz-date
                                          credential-scope
                                          canonical-hash])
             signing-key (get-signature-key (aget env "R2_SECRET_ACCESS_KEY")
                                            date-stamp
                                            region
                                            service)
             raw-signature (hmac-sha256 signing-key (.encode text-encoder string-to-sign))
             signature (to-hex raw-signature)
             signed-query (str canonical-query "&X-Amz-Signature=" signature)]
    (str "https://" host canonical-uri "?" signed-query)))
(defn base64url->uint8array
  "Decodes a base64url string into a js/Uint8Array: re-pads to a
  multiple of four, swaps the URL-safe alphabet back to standard
  base64, then decodes via js/atob."
  [input]
  (let [pad (if (pos? (mod (count input) 4))
              (apply str (repeat (- 4 (mod (count input) 4)) "="))
              "")
        base64 (-> (str input pad)
                   (string/replace "-" "+")
                   (string/replace "_" "/"))
        raw (js/atob base64)
        bytes (js/Uint8Array. (.-length raw))]
    (dotimes [i (.-length raw)]
      (aset bytes i (.charCodeAt raw i)))
    bytes))

(defn decode-jwt-part
  "Base64url-decodes one JWT segment and JSON-parses it into a JS
  object."
  [part]
  (let [bytes (base64url->uint8array part)]
    (js/JSON.parse (.decode text-decoder bytes))))
(defn import-rsa-key
  "Imports a JWK as a non-extractable RSASSA-PKCS1-v1_5/SHA-256 verify
  key. Returns the WebCrypto importKey promise."
  [jwk]
  (.importKey js/crypto.subtle
              "jwk"
              jwk
              #js {:name "RSASSA-PKCS1-v1_5" :hash "SHA-256"}
              false
              #js ["verify"]))
(defn verify-jwt
  "Verifies a Cognito JWT: checks structure, iss/aud claims, expiry,
  then fetches the JWKS (env COGNITO_JWKS_URL), picks the key matching
  the header kid and verifies the RSA signature. Returns a promise of
  the payload object when valid; throws/rejects otherwise.
  NOTE(review): validation failures throw rather than yielding nil, so
  callers awaiting this see a rejected promise on bad tokens."
  [token env]
  (js-await [parts (string/split token #"\.")
             _ (when (not= 3 (count parts)) (throw (ex-info "invalid" {})))
             header-part (nth parts 0)
             payload-part (nth parts 1)
             signature-part (nth parts 2)
             header (decode-jwt-part header-part)
             payload (decode-jwt-part payload-part)
             issuer (aget env "COGNITO_ISSUER")
             client-id (aget env "COGNITO_CLIENT_ID")
             _ (when (not= (aget payload "iss") issuer) (throw (ex-info "iss" {})))
             _ (when (not= (aget payload "aud") client-id) (throw (ex-info "aud" {})))
             now (js/Math.floor (/ (.now js/Date) 1000))
             _ (when (and (aget payload "exp") (< (aget payload "exp") now))
                 (throw (ex-info "exp" {})))
             jwks-resp (js/fetch (aget env "COGNITO_JWKS_URL"))
             _ (when-not (.-ok jwks-resp) (throw (ex-info "jwks" {})))
             jwks (.json jwks-resp)
             keys (or (aget jwks "keys") #js [])
             key (.find keys (fn [k] (= (aget k "kid") (aget header "kid"))))
             _ (when-not key (throw (ex-info "kid" {})))
             crypto-key (import-rsa-key key)
             data (.encode text-encoder (str header-part "." payload-part))
             signature (base64url->uint8array signature-part)
             ok (.verify js/crypto.subtle
                         "RSASSA-PKCS1-v1_5"
                         crypto-key
                         signature
                         data)]
    (when ok payload)))
(defn normalize-etag
  "Strips double quotes from an ETag header value; nil passes through."
  [etag]
  (some-> etag (string/replace "\"" "")))
(defn merge-attr
  "Adds `value` under `attr` in `entity`. The first value is stored
  as-is; a vector or set accumulates via conj; any other existing value
  is wrapped with the new one into a two-element vector."
  [entity attr value]
  (if-not (contains? entity attr)
    (assoc entity attr value)
    (let [existing (get entity attr)]
      (assoc entity attr
             (cond
               (vector? existing) (conj existing value)
               (set? existing) (conj existing value)
               :else [existing value])))))
(defn datoms->entities
  "Folds a seq of [e a v tx added?] datoms into {eid {attr value}},
  keeping only assertions (added? truthy) and merging repeated
  attributes via `merge-attr`."
  [datoms]
  (reduce
   (fn [acc [e a v _tx added?]]
     (cond-> acc
       added? (update e merge-attr a v)))
   {}
   datoms))
(defn entity->title
  "Best-effort display title for an entity: :block/title, then
  :block/name, then \"Untitled\". Safe to call with nil."
  [entity]
  (or (get entity :block/title)
      (get entity :block/name)
      "Untitled"))
(defn render-blocks
  "Renders `blocks` as an HTML <ul>, ordered by :block/order with the
  stringified :block/uuid as fallback. Block content is HTML-escaped.
  Fix: the previous sort key fell back to the raw :block/uuid (a UUID),
  and cljs `compare` throws when mixing UUIDs with :block/order
  strings; the fallback is now stringified so keys are homogeneous."
  [blocks]
  (let [sort-key (fn [block]
                   (or (:block/order block)
                       (some-> (:block/uuid block) str)
                       ""))
        sorted (sort-by sort-key blocks)]
    (str "<ul class=\"blocks\">"
         (apply str
                (map (fn [block]
                       (let [content (or (:block/content block) "")]
                         (str "<li class=\"block\">"
                              "<div class=\"block-content\">"
                              (string/escape content {"&" "&amp;"
                                                      "<" "&lt;"
                                                      ">" "&gt;"})
                              "</div>"
                              "</li>")))
                     sorted))
         "</ul>")))
(defn render-page-html
  "Renders a standalone HTML document for the page `page-uuid-str` out
  of a transit-encoded datom payload.
  `transit` is a string produced by ldb/write-transit-str containing
  {:datoms [...]}; the page entity is located by :block/uuid and its
  child blocks by :block/page pointing at the page's eid.
  Fix: the page entity and its eid were found with two separate scans
  over `entities`; a single pass now yields both, and the repeated
  HTML-escape map is factored into one helper."
  [transit page-uuid-str]
  (let [escape-html (fn [s]
                      (string/escape s {"&" "&amp;" "<" "&lt;" ">" "&gt;"}))
        payload (ldb/read-transit-str transit)
        entities (datoms->entities (:datoms payload))
        page-uuid (uuid page-uuid-str)
        ;; One scan: [eid entity] of the page, or nil when absent.
        [page-eid page-entity] (some (fn [[e entity]]
                                       (when (= (:block/uuid entity) page-uuid)
                                         [e entity]))
                                     entities)
        page-title (entity->title page-entity)
        blocks (->> entities
                    (keep (fn [[_e entity]]
                            (when (= (:block/page entity) page-eid)
                              entity)))
                    (remove #(= (:block/uuid %) page-uuid)))]
    (str "<!doctype html>"
         "<html><head><meta charset=\"utf-8\"/>"
         "<meta name=\"viewport\" content=\"width=device-width,initial-scale=1\"/>"
         "<title>" (escape-html page-title) "</title>"
         "<style>"
         "body{margin:0;background:#fbf8f3;color:#1b1b1b;font-family:Georgia,serif;}"
         ".wrap{max-width:880px;margin:0 auto;padding:40px 24px;}"
         "h1{font-size:30px;margin:0 0 20px;font-weight:600;}"
         ".blocks{list-style:none;padding:0;margin:0;}"
         ".block{padding:8px 0;border-bottom:1px solid #eee6dc;}"
         ".block-content{white-space:pre-wrap;line-height:1.6;}"
         "</style>"
         "</head><body>"
         "<main class=\"wrap\">"
         "<h1>" (escape-html page-title) "</h1>"
         (render-blocks blocks)
         "</main></body></html>")))
(defn handle-post-pages
  "POST /pages: authenticates (Cognito JWT, or DEV_SKIP_AUTH=true),
  reads publish metadata from the x-publish-meta header or the transit
  body, stores the body in R2 keyed by content hash (skipping the put
  when the object already exists), then records metadata in the
  per-page Durable Object and mirrors it into the \"index\" DO."
  [request env]
  (js-await [auth-header (.get (.-headers request) "authorization")
             token (when (and auth-header (string/starts-with? auth-header "Bearer "))
                     (subs auth-header 7))
             dev-skip? (= "true" (aget env "DEV_SKIP_AUTH"))
             claims (cond
                      dev-skip? #js {:sub "dev"}
                      (nil? token) nil
                      :else (verify-jwt token env))]
    ;; NOTE(review): this rebinding duplicates the dev-skip? branch of
    ;; the cond above; the outer `claims` already covers it.
    (let [claims (if dev-skip? #js {:sub "dev"} claims)]
      (if (and (not dev-skip?) (nil? claims))
        (unauthorized)
        (js-await [body (.arrayBuffer request)]
          (let [meta (or (parse-meta-header request)
                         (meta-from-body body))]
            (cond
              (not (valid-meta? meta))
              (bad-request "missing publish metadata")
              :else
              (js-await [;; Content-addressed R2 key: identical payloads dedupe.
                         r2-key (str "publish/" (aget meta "publish/graph") "/"
                                     (aget meta "publish/content-hash") ".transit")
                         r2 (aget env "PUBLISH_R2")
                         existing (.head r2 r2-key)
                         _ (when-not existing
                             (.put r2 r2-key body
                                   #js {:httpMetadata #js {:contentType "application/transit+json"}}))
                         ^js do-ns (aget env "PUBLISH_META_DO")
                         ;; One DO per graph:page pair.
                         do-id (.idFromName do-ns
                                            (str (aget meta "publish/graph")
                                                 ":"
                                                 (aget meta "page-uuid")))
                         do-stub (.get do-ns do-id)
                         payload (clj->js {:page-uuid (aget meta "page-uuid")
                                           :publish/graph (aget meta "publish/graph")
                                           :schema-version (aget meta "schema-version")
                                           :block-count (aget meta "block-count")
                                           :publish/content-hash (aget meta "publish/content-hash")
                                           :publish/content-length (aget meta "publish/content-length")
                                           :r2_key r2-key
                                           :owner_sub (aget claims "sub")
                                           :publish/created-at (aget meta "publish/created-at")
                                           :updated_at (.now js/Date)})
                         meta-resp (.fetch do-stub "https://publish/pages"
                                           #js {:method "POST"
                                                :headers #js {"content-type" "application/json"}
                                                :body (js/JSON.stringify payload)})]
                (if-not (.-ok meta-resp)
                  (json-response {:error "metadata store failed"} 500)
                  ;; Best-effort mirror into the shared "index" DO used by
                  ;; the listing endpoint; its response is not checked.
                  (js-await [index-id (.idFromName do-ns "index")
                             index-stub (.get do-ns index-id)
                             _ (.fetch index-stub "https://publish/pages"
                                       #js {:method "POST"
                                            :headers #js {"content-type" "application/json"}
                                            :body (js/JSON.stringify payload)})]
                    (json-response {:page_uuid (aget meta "page-uuid")
                                    :graph_uuid (aget meta "publish/graph")
                                    :r2_key r2-key
                                    :updated_at (.now js/Date)})))))))))))
(defn handle-get-page
  "GET /pages/:graph-uuid/:page-uuid — returns the stored metadata JSON
  from the page's Durable Object, honoring if-none-match with a 304
  when the content hash matches."
  [request env]
  (let [url (js/URL. (.-url request))
        parts (string/split (.-pathname url) #"/")
        ;; pathname "/pages/<graph>/<page>" splits to ["" "pages" g p].
        graph-uuid (nth parts 2 nil)
        page-uuid (nth parts 3 nil)]
    (if (or (nil? graph-uuid) (nil? page-uuid))
      (bad-request "missing graph uuid or page uuid")
      (js-await [^js do-ns (aget env "PUBLISH_META_DO")
                 do-id (.idFromName do-ns (str graph-uuid ":" page-uuid))
                 do-stub (.get do-ns do-id)
                 meta-resp (.fetch do-stub (str "https://publish/pages/" graph-uuid "/" page-uuid))]
        (if-not (.-ok meta-resp)
          (not-found)
          (js-await [meta (.json meta-resp)
                     etag (aget meta "publish/content-hash")
                     if-none-match (normalize-etag (.get (.-headers request) "if-none-match"))]
            (if (and etag if-none-match (= etag if-none-match))
              (js/Response. nil #js {:status 304
                                     :headers (merge-headers
                                               #js {:etag etag}
                                               (cors-headers))})
              (json-response (js->clj meta :keywordize-keys false) 200))))))))
(defn handle-get-page-transit
  "GET /pages/:graph-uuid/:page-uuid/transit — looks up the page's
  metadata in its Durable Object and returns a short-lived (300s)
  presigned R2 URL for the transit blob, or 304 when the client's
  if-none-match matches the content hash."
  [request env]
  (let [url (js/URL. (.-url request))
        parts (string/split (.-pathname url) #"/")
        graph-uuid (nth parts 2 nil)
        page-uuid (nth parts 3 nil)]
    (if (or (nil? graph-uuid) (nil? page-uuid))
      (bad-request "missing graph uuid or page uuid")
      (js-await [^js do-ns (aget env "PUBLISH_META_DO")
                 do-id (.idFromName do-ns (str graph-uuid ":" page-uuid))
                 do-stub (.get do-ns do-id)
                 meta-resp (.fetch do-stub (str "https://publish/pages/" graph-uuid "/" page-uuid))]
        (if-not (.-ok meta-resp)
          (not-found)
          (js-await [meta (.json meta-resp)
                     r2-key (aget meta "r2_key")]
            (if-not r2-key
              (json-response {:error "missing transit"} 404)
              (js-await [etag (aget meta "publish/content-hash")
                         if-none-match (normalize-etag (.get (.-headers request) "if-none-match"))
                         ;; Only presign when we will actually return the URL.
                         signed-url (when-not (and etag if-none-match (= etag if-none-match))
                                      (presign-r2-url r2-key env))]
                (if (and etag if-none-match (= etag if-none-match))
                  (js/Response. nil #js {:status 304
                                         :headers (merge-headers
                                                   #js {:etag etag}
                                                   (cors-headers))})
                  (json-response {:url signed-url
                                  :expires_in 300
                                  :etag etag}
                                 200))))))))))
(defn handle-list-pages
  "GET /pages — proxies the listing request to the shared \"index\"
  Durable Object and relays its JSON body."
  [env]
  (js-await [^js do-ns (aget env "PUBLISH_META_DO")
             do-id (.idFromName do-ns "index")
             do-stub (.get do-ns do-id)
             meta-resp (.fetch do-stub "https://publish/pages" #js {:method "GET"})]
    (if-not (.-ok meta-resp)
      (not-found)
      (js-await [meta (.json meta-resp)]
        (json-response (js->clj meta :keywordize-keys false) 200)))))
(defn handle-page-html
  "GET /p/:graph-uuid/:page-uuid — fetches the page's metadata from its
  Durable Object, loads the transit blob from R2 and returns
  server-rendered HTML."
  [request env]
  (let [url (js/URL. (.-url request))
        parts (string/split (.-pathname url) #"/")
        graph-uuid (nth parts 2 nil)
        page-uuid (nth parts 3 nil)]
    (if (or (nil? graph-uuid) (nil? page-uuid))
      (bad-request "missing graph uuid or page uuid")
      (js-await [^js do-ns (aget env "PUBLISH_META_DO")
                 do-id (.idFromName do-ns (str graph-uuid ":" page-uuid))
                 do-stub (.get do-ns do-id)
                 meta-resp (.fetch do-stub (str "https://publish/pages/" graph-uuid "/" page-uuid))]
        (if-not (.-ok meta-resp)
          (not-found)
          (js-await [meta (.json meta-resp)
                     r2 (aget env "PUBLISH_R2")
                     object (.get r2 (aget meta "r2_key"))]
            (if-not object
              (json-response {:error "missing transit blob"} 404)
              (js-await [buffer (.arrayBuffer object)
                         transit (.decode text-decoder buffer)
                         html (render-page-html transit page-uuid)]
                (js/Response.
                 html
                 #js {:headers (merge-headers
                                #js {"content-type" "text/html; charset=utf-8"}
                                (cors-headers))})))))))))
(defn handle-fetch
  "Top-level router: CORS preflight, HTML pages under /p/, and the
  /pages JSON API (POST upsert, GET list, GET page / page transit).
  Anything else is 404."
  [request env]
  (let [url (js/URL. (.-url request))
        path (.-pathname url)
        method (.-method request)]
    (cond
      (= method "OPTIONS")
      (js/Response. nil #js {:status 204 :headers (cors-headers)})
      (and (string/starts-with? path "/p/") (= method "GET"))
      (handle-page-html request env)
      (and (= path "/pages") (= method "POST"))
      (handle-post-pages request env)
      (and (= path "/pages") (= method "GET"))
      (handle-list-pages env)
      (and (string/starts-with? path "/pages/") (= method "GET"))
      ;; "/pages/<graph>/<page>/transit" splits so index 4 is "transit".
      (let [parts (string/split path #"/")]
        (if (= (nth parts 4 nil) "transit")
          (handle-get-page-transit request env)
          (handle-get-page request env)))
      :else
      (not-found))))

;; Worker module export consumed by the Cloudflare runtime.
(def worker
  #js {:fetch (fn [request env _ctx]
                (handle-fetch request env))})
(defn init-schema!
  "Idempotently creates the pages table in the DO's SQLite store.
  Drops a legacy table first when it still has the old page_id/graph
  columns (pre graph_uuid schema)."
  [sql]
  (let [cols (get-sql-rows (sql-exec sql "PRAGMA table_info(pages);"))
        drop? (some #(contains? #{"page_id" "graph"} (aget % "name")) cols)]
    (when drop?
      (sql-exec sql "DROP TABLE IF EXISTS pages;"))
    (sql-exec sql
              (str "CREATE TABLE IF NOT EXISTS pages ("
                   "page_uuid TEXT NOT NULL,"
                   "graph_uuid TEXT NOT NULL,"
                   "schema_version TEXT,"
                   "block_count INTEGER,"
                   "content_hash TEXT NOT NULL,"
                   "content_length INTEGER,"
                   "r2_key TEXT NOT NULL,"
                   "owner_sub TEXT,"
                   "created_at INTEGER,"
                   "updated_at INTEGER,"
                   "PRIMARY KEY (graph_uuid, page_uuid)"
                   ");"))))
(defn row->meta
  "Converts a SQL row (JS object) into a string-keyed map and mirrors
  the snake_case columns under the publish/* keys the API exposes."
  [row]
  (let [m (js->clj row :keywordize-keys false)]
    (-> m
        (assoc "publish/graph" (get m "graph_uuid"))
        (assoc "publish/content-hash" (get m "content_hash"))
        (assoc "publish/content-length" (get m "content_length")))))
(defn do-fetch
  "Request handler body for PublishMetaDO. Ensures the schema exists,
  then: POST upserts one metadata row from the JSON body; GET with
  /pages/:graph/:page returns that row (404 when absent); GET without
  ids lists all rows newest-first; other methods get 405."
  [^js self request]
  (let [sql (.-sql self)]
    (init-schema! sql)
    (cond
      (= "POST" (.-method request))
      (js-await [body (.json request)]
        ;; Upsert keyed on (graph_uuid, page_uuid); created_at keeps the
        ;; incoming value, updated_at always takes the new one.
        (sql-exec sql
                  (str "INSERT INTO pages ("
                       "page_uuid,"
                       "graph_uuid,"
                       "schema_version,"
                       "block_count,"
                       "content_hash,"
                       "content_length,"
                       "r2_key,"
                       "owner_sub,"
                       "created_at,"
                       "updated_at"
                       ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                       " ON CONFLICT(graph_uuid, page_uuid) DO UPDATE SET"
                       " page_uuid=excluded.page_uuid,"
                       " schema_version=excluded.schema_version,"
                       " block_count=excluded.block_count,"
                       " content_hash=excluded.content_hash,"
                       " content_length=excluded.content_length,"
                       " r2_key=excluded.r2_key,"
                       " owner_sub=excluded.owner_sub,"
                       " updated_at=excluded.updated_at;")
                  (aget body "page-uuid")
                  (aget body "publish/graph")
                  (aget body "schema-version")
                  (aget body "block-count")
                  (aget body "publish/content-hash")
                  (aget body "publish/content-length")
                  (aget body "r2_key")
                  (aget body "owner_sub")
                  (aget body "publish/created-at")
                  (aget body "updated_at"))
        (json-response {:ok true}))
      (= "GET" (.-method request))
      (let [url (js/URL. (.-url request))
            parts (string/split (.-pathname url) #"/")
            graph-uuid (nth parts 2 nil)
            page-uuid (nth parts 3 nil)]
        (if (and graph-uuid page-uuid)
          (let [rows (get-sql-rows
                      (sql-exec sql
                                (str "SELECT page_uuid, graph_uuid, schema_version, block_count, "
                                     "content_hash, content_length, r2_key, owner_sub, created_at, updated_at "
                                     "FROM pages WHERE graph_uuid = ? AND page_uuid = ? LIMIT 1;")
                                graph-uuid
                                page-uuid))
                row (first rows)]
            (if-not row
              (not-found)
              (json-response (row->meta row))))
          (let [rows (get-sql-rows
                      (sql-exec sql
                                (str "SELECT page_uuid, graph_uuid, schema_version, block_count, "
                                     "content_hash, content_length, r2_key, owner_sub, created_at, updated_at "
                                     "FROM pages ORDER BY updated_at DESC;")))]
            (json-response {:pages (map row->meta rows)}))))
      :else
      (json-response {:error "method not allowed"} 405))))
;; Durable Object storing publish metadata in its embedded SQLite.
;; All request logic lives in `do-fetch`; the class only wires up state.
(defclass PublishMetaDO
  (extends DurableObject)
  (constructor [this ^js state env]
               (super state env)
               (set! (.-state this) state)
               (set! (.-env this) env)
               ;; Cache the SqlStorage handle for do-fetch.
               (set! (.-sql this) (.-sql ^js (.-storage state))))
  Object
  (fetch [this request]
         (do-fetch this request)))

View File

@@ -18,6 +18,8 @@ metadata in a Durable Object backed by SQLite.
### Routes
- `GET /p/:page-uuid`
- Returns server-rendered HTML for the page
- `POST /pages`
- Requires `Authorization: Bearer <JWT>`
- Requires `x-publish-meta` header (JSON)
@@ -36,3 +38,6 @@ metadata in a Durable Object backed by SQLite.
- For local testing, run `wrangler dev` and use `deps/publish/worker/scripts/dev_test.sh`.
- If you switch schema versions, clear local DO state with
`deps/publish/worker/scripts/clear_dev_state.sh`.
- Build the SSR bundle with `clojure -M:cljs release publish-ssr` before running the worker.
- Build the worker bundle with `clojure -M:cljs release publish-worker` before running the worker.
- For dev, you can run `clojure -M:cljs watch publish-worker` in one terminal.

View File

@@ -30,3 +30,7 @@ echo
curl -sS "${BASE_URL}/pages"
echo
curl -sS "${BASE_URL}/p/${PAGE_UUID}"
echo

View File

@@ -1,501 +0,0 @@
import { DurableObject } from "cloudflare:workers";
const textDecoder = new TextDecoder(); // shared UTF-8 decoder, created once per isolate
function jsonResponse(data, status = 200) {
return new Response(JSON.stringify(data), {
status,
headers: {
"content-type": "application/json",
},
});
}
// 401 JSON error response.
function unauthorized() {
  return jsonResponse({ error: "unauthorized" }, 401);
}
// 400 JSON error response carrying `message`.
function badRequest(message) {
  return jsonResponse({ error: message }, 400);
}
function base64UrlToUint8Array(input) {
const pad = input.length % 4 ? "=".repeat(4 - (input.length % 4)) : "";
const base64 = (input + pad).replace(/-/g, "+").replace(/_/g, "/");
const raw = atob(base64);
const bytes = new Uint8Array(raw.length);
for (let i = 0; i < raw.length; i += 1) {
bytes[i] = raw.charCodeAt(i);
}
return bytes;
}
function getSqlRows(result) {
if (!result) return [];
if (typeof result.toArray === "function") return result.toArray();
if (typeof result[Symbol.iterator] === "function") {
return Array.from(result);
}
if (Array.isArray(result.results)) return result.results;
if (Array.isArray(result.rows)) return result.rows;
if (Array.isArray(result)) {
if (result.length === 0) return [];
const first = result[0];
if (first && Array.isArray(first.results)) return first.results;
if (first && Array.isArray(first.rows)) return first.rows;
return result;
}
return [];
}
function toHex(buffer) {
return [...new Uint8Array(buffer)]
.map((b) => b.toString(16).padStart(2, "0"))
.join("");
}
// SHA-256 of the UTF-8 bytes of `message`, as a lowercase hex string.
async function sha256Hex(message) {
  const data = new TextEncoder().encode(message);
  const digest = await crypto.subtle.digest("SHA-256", data);
  return toHex(digest);
}
// HMAC-SHA-256 of `message` (BufferSource) keyed by the raw bytes of
// `key`; resolves to the signature ArrayBuffer.
async function hmacSha256(key, message) {
  const cryptoKey = await crypto.subtle.importKey(
    "raw",
    key,
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"]
  );
  return crypto.subtle.sign("HMAC", cryptoKey, message);
}
function encodeRfc3986(value) {
return encodeURIComponent(value).replace(/[!'()*]/g, (c) =>
`%${c.charCodeAt(0).toString(16).toUpperCase()}`
);
}
// RFC 3986-encodes each "/"-separated path segment, keeping separators.
function encodePath(path) {
  const segments = path.split("/");
  return segments.map(encodeRfc3986).join("/");
}
// Derives the AWS SigV4 signing key by chaining HMACs over date stamp,
// region, service and the literal "aws4_request".
async function getSignatureKey(secret, dateStamp, region, service) {
  const kDate = await hmacSha256(
    new TextEncoder().encode(`AWS4${secret}`),
    new TextEncoder().encode(dateStamp)
  );
  const kRegion = await hmacSha256(kDate, new TextEncoder().encode(region));
  const kService = await hmacSha256(kRegion, new TextEncoder().encode(service));
  return hmacSha256(kService, new TextEncoder().encode("aws4_request"));
}
// Builds an AWS SigV4 query-presigned GET URL for `r2Key` in the R2
// bucket configured via env (R2_ACCOUNT_ID, R2_BUCKET, access keys).
// `expiresSeconds` bounds the URL's validity (default 300s).
async function presignR2Url(r2Key, env, expiresSeconds = 300) {
  const region = "auto";
  const service = "s3";
  const host = `${env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`;
  const bucket = env.R2_BUCKET;
  const method = "GET";
  const now = new Date();
  // e.g. "2025-12-27T07:57:31.123Z" -> "20251227T075731Z"
  const amzDate = now
    .toISOString()
    .replace(/[:-]|\.\d{3}/g, "");
  const dateStamp = amzDate.slice(0, 8);
  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;
  const params = [
    ["X-Amz-Algorithm", "AWS4-HMAC-SHA256"],
    ["X-Amz-Credential", `${env.R2_ACCESS_KEY_ID}/${credentialScope}`],
    ["X-Amz-Date", amzDate],
    ["X-Amz-Expires", String(expiresSeconds)],
    ["X-Amz-SignedHeaders", "host"],
  ];
  // SigV4 requires the query parameters in byte order.
  params.sort((a, b) => (a[0] < b[0] ? -1 : 1));
  const canonicalQueryString = params
    .map(([k, v]) => `${encodeRfc3986(k)}=${encodeRfc3986(v)}`)
    .join("&");
  const canonicalUri = `/${bucket}/${encodePath(r2Key)}`;
  const canonicalHeaders = `host:${host}\n`;
  const signedHeaders = "host";
  const payloadHash = "UNSIGNED-PAYLOAD";
  const canonicalRequest = [
    method,
    canonicalUri,
    canonicalQueryString,
    canonicalHeaders,
    signedHeaders,
    payloadHash,
  ].join("\n");
  const stringToSign = [
    "AWS4-HMAC-SHA256",
    amzDate,
    credentialScope,
    await sha256Hex(canonicalRequest),
  ].join("\n");
  const signingKey = await getSignatureKey(
    env.R2_SECRET_ACCESS_KEY,
    dateStamp,
    region,
    service
  );
  const signature = toHex(await hmacSha256(signingKey, new TextEncoder().encode(stringToSign)));
  const signedQuery = `${canonicalQueryString}&X-Amz-Signature=${signature}`;
  return `https://${host}${canonicalUri}?${signedQuery}`;
}
// Base64url-decodes one JWT segment and JSON-parses it.
function decodeJwtPart(part) {
  const bytes = base64UrlToUint8Array(part);
  return JSON.parse(textDecoder.decode(bytes));
}
// Imports a JWK as a non-extractable RSASSA-PKCS1-v1_5/SHA-256 key
// usable only for signature verification.
async function importRsaKey(jwk) {
  return crypto.subtle.importKey(
    "jwk",
    jwk,
    {
      name: "RSASSA-PKCS1-v1_5",
      hash: "SHA-256",
    },
    false,
    ["verify"]
  );
}
// Verifies a Cognito JWT: structure, iss/aud claims and expiry, then
// fetches the JWKS (env.COGNITO_JWKS_URL), picks the key matching the
// header kid and checks the RSA signature. Resolves to the payload
// object when valid, or null on any failure.
async function verifyJwt(token, env) {
  const parts = token.split(".");
  if (parts.length !== 3) {
    return null;
  }
  const [headerPart, payloadPart, signaturePart] = parts;
  const header = decodeJwtPart(headerPart);
  const payload = decodeJwtPart(payloadPart);
  if (payload.iss !== env.COGNITO_ISSUER) {
    return null;
  }
  if (payload.aud !== env.COGNITO_CLIENT_ID) {
    return null;
  }
  const now = Math.floor(Date.now() / 1000);
  if (payload.exp && payload.exp < now) {
    return null;
  }
  const jwksResp = await fetch(env.COGNITO_JWKS_URL);
  if (!jwksResp.ok) {
    return null;
  }
  const jwks = await jwksResp.json();
  const key = (jwks.keys || []).find((k) => k.kid === header.kid);
  if (!key) {
    return null;
  }
  const cryptoKey = await importRsaKey(key);
  // Signature covers the raw "<header>.<payload>" text, not the JSON.
  const data = new TextEncoder().encode(`${headerPart}.${payloadPart}`);
  const signature = base64UrlToUint8Array(signaturePart);
  const ok = await crypto.subtle.verify("RSASSA-PKCS1-v1_5", cryptoKey, signature, data);
  return ok ? payload : null;
}
// POST /pages: authenticates (Cognito JWT, or DEV_SKIP_AUTH=true),
// validates the x-publish-meta header, stores the request body in R2
// keyed by content hash (skipping when already present), then records
// metadata in the per-page Durable Object and mirrors it into "index".
async function handlePostPages(request, env) {
  const authHeader = request.headers.get("authorization") || "";
  const token = authHeader.startsWith("Bearer ") ? authHeader.slice(7) : null;
  const devSkipAuth = env.DEV_SKIP_AUTH === "true";
  if (!token && !devSkipAuth) {
    return unauthorized();
  }
  const claims = devSkipAuth ? { sub: "dev" } : await verifyJwt(token, env);
  if (!claims && !devSkipAuth) {
    return unauthorized();
  }
  const metaHeader = request.headers.get("x-publish-meta");
  if (!metaHeader) {
    return badRequest("missing x-publish-meta header");
  }
  let meta;
  try {
    meta = JSON.parse(metaHeader);
  } catch (_err) {
    return badRequest("invalid x-publish-meta header");
  }
  if (!meta["publish/content-hash"] || !meta["publish/graph"] || !meta["page-uuid"]) {
    return badRequest("missing publish metadata");
  }
  const body = await request.arrayBuffer();
  // Content-addressed R2 key: identical payloads dedupe on the hash.
  const r2Key = `publish/${meta["publish/graph"]}/${meta["publish/content-hash"]}.transit`;
  const existing = await env.PUBLISH_R2.head(r2Key);
  if (!existing) {
    await env.PUBLISH_R2.put(r2Key, body, {
      httpMetadata: {
        contentType: "application/transit+json",
      },
    });
  }
  // NOTE(review): the DO is named by page-uuid only (no graph prefix),
  // so the same page uuid in two graphs shares one DO here.
  const doId = env.PUBLISH_META_DO.idFromName(meta["page-uuid"]);
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch("https://publish/pages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({
      ...meta,
      r2_key: r2Key,
      owner_sub: claims.sub,
      updated_at: Date.now(),
    }),
  });
  if (!metaResponse.ok) {
    return jsonResponse({ error: "metadata store failed" }, 500);
  }
  // Best-effort mirror into the shared "index" DO; response unchecked.
  const indexId = env.PUBLISH_META_DO.idFromName("index");
  const indexStub = env.PUBLISH_META_DO.get(indexId);
  await indexStub.fetch("https://publish/pages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({
      ...meta,
      r2_key: r2Key,
      owner_sub: claims.sub,
      updated_at: Date.now(),
    }),
  });
  return jsonResponse({
    page_uuid: meta["page-uuid"],
    r2_key: r2Key,
    updated_at: Date.now(),
  });
}
// GET /pages/:page-uuid — returns the stored metadata JSON from the
// page's Durable Object; honors if-none-match against the content hash
// with a 304.
async function handleGetPage(request, env) {
  const url = new URL(request.url);
  // pathname "/pages/<uuid>" splits to ["", "pages", "<uuid>"].
  const pageUuid = url.pathname.split("/")[2];
  if (!pageUuid) {
    return badRequest("missing page uuid");
  }
  const doId = env.PUBLISH_META_DO.idFromName(pageUuid);
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch(`https://publish/pages/${pageUuid}`);
  if (!metaResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }
  const meta = await metaResponse.json();
  const etag = meta["publish/content-hash"];
  const ifNoneMatch = request.headers.get("if-none-match");
  if (etag && ifNoneMatch && ifNoneMatch.replace(/\"/g, "") === etag) {
    return new Response(null, {
      status: 304,
      headers: {
        etag,
      },
    });
  }
  return jsonResponse(meta, 200);
}
// GET /pages/:page-uuid/transit — looks up the page's metadata and
// returns a short-lived presigned R2 URL for the transit blob, or 304
// when the client's if-none-match matches the content hash.
async function handleGetPageTransit(request, env) {
  const url = new URL(request.url);
  const pageUuid = url.pathname.split("/")[2];
  if (!pageUuid) {
    return badRequest("missing page uuid");
  }
  const doId = env.PUBLISH_META_DO.idFromName(pageUuid);
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch(`https://publish/pages/${pageUuid}`);
  if (!metaResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }
  const meta = await metaResponse.json();
  if (!meta.r2_key) {
    return jsonResponse({ error: "missing transit" }, 404);
  }
  const etag = meta["publish/content-hash"];
  const ifNoneMatch = request.headers.get("if-none-match");
  if (etag && ifNoneMatch && ifNoneMatch.replace(/\"/g, "") === etag) {
    return new Response(null, {
      status: 304,
      headers: {
        etag,
      },
    });
  }
  const signedUrl = await presignR2Url(meta.r2_key, env);
  return jsonResponse(
    {
      url: signedUrl,
      expires_in: 300,
      etag,
    },
    200
  );
}
// GET /pages — proxies the listing to the shared "index" Durable
// Object and relays its JSON body.
async function handleListPages(request, env) {
  const doId = env.PUBLISH_META_DO.idFromName("index");
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch("https://publish/pages", {
    method: "GET",
  });
  if (!metaResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }
  const meta = await metaResponse.json();
  return jsonResponse(meta, 200);
}
// Worker entrypoint: routes the /pages JSON API (POST upsert, GET
// list, GET page / page transit); everything else is 404.
export default {
  async fetch(request, env) {
    const url = new URL(request.url);
    if (url.pathname === "/pages" && request.method === "POST") {
      return handlePostPages(request, env);
    }
    if (url.pathname === "/pages" && request.method === "GET") {
      return handleListPages(request, env);
    }
    if (url.pathname.startsWith("/pages/") && request.method === "GET") {
      // "/pages/<uuid>/transit" splits so index 3 is "transit".
      const parts = url.pathname.split("/");
      if (parts[3] === "transit") {
        return handleGetPageTransit(request, env);
      }
      return handleGetPage(request, env);
    }
    return jsonResponse({ error: "not found" }, 404);
  },
};
// Durable Object that stores publish-page metadata in its embedded SQLite
// store. One instance per page uuid serves single-page lookups; a shared
// "index" instance serves listings.
export class PublishMetaDO extends DurableObject {
  constructor(state, env) {
    super(state, env);
    this.state = state;
    this.env = env;
    this.sql = state.storage.sql;
    // Set once initSchema has run in this isolate, so the PRAGMA +
    // CREATE TABLE work is done once per instance instead of per request.
    this.schemaReady = false;
  }

  // Ensure the `pages` table exists with the current layout. A legacy
  // layout keyed by `page_id` is dropped and recreated. Idempotent;
  // memoized per isolate via this.schemaReady.
  async initSchema() {
    if (this.schemaReady) {
      return;
    }
    const cols = getSqlRows(this.sql.exec("PRAGMA table_info(pages);"));
    const hasLegacyId = cols.some((col) => col.name === "page_id");
    if (hasLegacyId) {
      this.sql.exec("DROP TABLE IF EXISTS pages;");
    }
    this.sql.exec(`
      CREATE TABLE IF NOT EXISTS pages (
        page_uuid TEXT NOT NULL,
        graph TEXT NOT NULL,
        schema_version TEXT,
        block_count INTEGER,
        content_hash TEXT NOT NULL,
        content_length INTEGER,
        r2_key TEXT NOT NULL,
        owner_sub TEXT,
        created_at INTEGER,
        updated_at INTEGER,
        PRIMARY KEY (graph, page_uuid)
      );
    `);
    this.schemaReady = true;
  }

  // POST: upsert one page's metadata row (created_at is intentionally kept
  //       from the first insert on conflict).
  // GET /pages/:uuid -> single row; GET otherwise -> all rows, newest first.
  async fetch(request) {
    await this.initSchema();
    if (request.method === "POST") {
      const body = await request.json();
      this.sql.exec(
        `
        INSERT INTO pages (
          page_uuid,
          graph,
          schema_version,
          block_count,
          content_hash,
          content_length,
          r2_key,
          owner_sub,
          created_at,
          updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(graph, page_uuid) DO UPDATE SET
          page_uuid=excluded.page_uuid,
          schema_version=excluded.schema_version,
          block_count=excluded.block_count,
          content_hash=excluded.content_hash,
          content_length=excluded.content_length,
          r2_key=excluded.r2_key,
          owner_sub=excluded.owner_sub,
          updated_at=excluded.updated_at;
        `,
        body["page-uuid"],
        body["publish/graph"],
        body["schema-version"],
        body["block-count"],
        body["publish/content-hash"],
        body["publish/content-length"],
        body["r2_key"],
        body["owner_sub"],
        body["publish/created-at"],
        body["updated_at"]
      );
      return jsonResponse({ ok: true });
    }
    if (request.method === "GET") {
      const url = new URL(request.url);
      const parts = url.pathname.split("/");
      const pageUuid = parts[2];
      if (pageUuid) {
        const result = this.sql.exec(
          `
          SELECT page_uuid, graph, schema_version, block_count,
                 content_hash, content_length, r2_key, owner_sub, created_at, updated_at
          FROM pages WHERE page_uuid = ? LIMIT 1;
          `,
          pageUuid
        );
        const rows = getSqlRows(result);
        const row = rows[0];
        if (!row) {
          return jsonResponse({ error: "not found" }, 404);
        }
        // Also expose hash/length under the namespaced keys the client reads.
        return jsonResponse({
          ...row,
          "publish/content-hash": row.content_hash,
          "publish/content-length": row.content_length,
        });
      }
      const result = this.sql.exec(`
        SELECT page_uuid, graph, schema_version, block_count,
               content_hash, content_length, r2_key, owner_sub, created_at, updated_at
        FROM pages ORDER BY updated_at DESC;
      `);
      const rows = getSqlRows(result);
      return jsonResponse({
        pages: rows.map((row) => ({
          ...row,
          "publish/content-hash": row.content_hash,
          "publish/content-length": row.content_length,
        })),
      });
    }
    return jsonResponse({ error: "method not allowed" }, 405);
  }
}

View File

@@ -1,6 +1,12 @@
name = "logseq-publish"
main = "src/index.js"
compatibility_date = "2024-11-01"
main = "dist/worker/main.js"
compatibility_date = "2025-02-04"
# Workers Logs
# Docs: https://developers.cloudflare.com/workers/observability/logs/workers-logs/
# Configuration: https://developers.cloudflare.com/workers/observability/logs/workers-logs/#enable-workers-logs
[observability]
enabled = true
[[durable_objects.bindings]]
name = "PUBLISH_META_DO"

View File

@@ -1,6 +1,7 @@
;; shadow-cljs configuration
{:deps true
:nrepl {:port 8701}
:source-paths ["src/publish-ssr" "src/main" "src/electron" "src/resources"]
;; :ssl {:password "logseq"}
@@ -216,4 +217,16 @@
:entries [logseq.shui.storybook]
:output-dir "packages/ui/.storybook/cljs"
:devtools {:enabled true}
:compiler-options {:optimizations :simple}}}}
:compiler-options {:optimizations :simple}}
:publish-ssr {:target :npm-module
:entries [frontend.publish.ssr]
:output-dir "deps/publish/worker/dist/ssr"
:compiler-options {:optimizations :simple}}
:publish-worker {:target :esm
:output-dir "deps/publish/worker/dist/worker"
:modules {:main {:exports {default logseq.publish.worker/worker
PublishMetaDO logseq.publish.worker/PublishMetaDO}}}
:js-options {:js-provider :import}
:devtools {:enabled false}}}}

View File

@@ -25,12 +25,12 @@
;; when it launches (when pro plan launches) it should be removed
(def ENABLE-SETTINGS-ACCOUNT-TAB false)
(def PUBLISH-API-BASE "http://localhost:8787")
(if ENABLE-FILE-SYNC-PRODUCTION
(do (def LOGIN-URL
"https://logseq-prod.auth.us-east-1.amazoncognito.com/login?client_id=3c7np6bjtb4r1k1bi9i049ops5&response_type=code&scope=email+openid+phone&redirect_uri=logseq%3A%2F%2Fauth-callback")
(def API-DOMAIN "api.logseq.com")
(def PUBLISH-API-DOMAIN "publish.logseq.com")
(def PUBLISH-API-BASE (str "https://" PUBLISH-API-DOMAIN))
(def COGNITO-IDP "https://cognito-idp.us-east-1.amazonaws.com/")
(def COGNITO-CLIENT-ID "69cs1lgme7p8kbgld8n5kseii6")
(def REGION "us-east-1")
@@ -41,8 +41,6 @@
(do (def LOGIN-URL
"https://logseq-test2.auth.us-east-2.amazoncognito.com/login?client_id=3ji1a0059hspovjq5fhed3uil8&response_type=code&scope=email+openid+phone&redirect_uri=logseq%3A%2F%2Fauth-callback")
(def API-DOMAIN "api-dev.logseq.com")
(def PUBLISH-API-DOMAIN "publish-dev.logseq.com")
(def PUBLISH-API-BASE (str "https://" PUBLISH-API-DOMAIN))
(def COGNITO-IDP "https://cognito-idp.us-east-2.amazonaws.com/")
(def COGNITO-CLIENT-ID "1qi1uijg8b6ra70nejvbptis0q")
(def REGION "us-east-2")

View File

@@ -15,19 +15,19 @@
(throw (ex-info "Publish fetch failed" {:status (.-status resp) :body body}))))))
(defn- cache-key
[page-uuid]
(str "publish/" page-uuid))
[graph-uuid page-uuid]
(str "publish/" graph-uuid "/" page-uuid))
(defn- get-cache
[page-uuid]
(when-let [raw (js/localStorage.getItem (cache-key page-uuid))]
[graph-uuid page-uuid]
(when-let [raw (js/localStorage.getItem (cache-key graph-uuid page-uuid))]
(try
(js/JSON.parse raw)
(catch :default _e nil))))
(defn- set-cache!
[page-uuid value]
(js/localStorage.setItem (cache-key page-uuid)
[graph-uuid page-uuid value]
(js/localStorage.setItem (cache-key graph-uuid page-uuid)
(js/JSON.stringify (clj->js value))))
(defn <get-page-meta
@@ -35,60 +35,70 @@
Returns {:status 200 :data <meta>} or {:status 304}.
"
[page-uuid]
(let [cached (get-cache page-uuid)
headers (cond-> {}
(and cached (.-etag cached))
(assoc "if-none-match" (.-etag cached)))]
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" page-uuid)
headers)]
(if (= 304 (:status resp))
resp
(let [meta (js->clj (:data resp) :keywordize-keys true)
etag (get meta :publish/content-hash)]
(set-cache! page-uuid {:etag etag :meta meta})
{:status 200 :data meta}))))
([page-uuid]
(<get-page-meta page-uuid (get-graph-uuid)))
([page-uuid graph-uuid]
(when-not graph-uuid
(throw (ex-info "Missing graph UUID" {:page-uuid page-uuid})))
(let [cached (get-cache graph-uuid page-uuid)
headers (cond-> {}
(and cached (.-etag cached))
(assoc "if-none-match" (.-etag cached)))]
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" graph-uuid "/" page-uuid)
headers)]
(if (= 304 (:status resp))
resp
(let [meta (js->clj (:data resp) :keywordize-keys true)
etag (get meta :publish/content-hash)]
(set-cache! graph-uuid page-uuid {:etag etag :meta meta})
{:status 200 :data meta}))))))
(defn <get-transit-url
"Fetch a signed transit URL. Uses meta ETag caching if provided.
Returns {:status 200 :data {:url ... :etag ...}} or {:status 304}.
"
[page-uuid]
(let [cached (get-cache page-uuid)
headers (cond-> {}
(and cached (.-etag cached))
(assoc "if-none-match" (.-etag cached)))]
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" page-uuid
"/transit")
headers)]
(if (= 304 (:status resp))
resp
(let [data (js->clj (:data resp) :keywordize-keys true)]
{:status 200 :data data}))))
([page-uuid]
(<get-transit-url page-uuid (get-graph-uuid)))
([page-uuid graph-uuid]
(when-not graph-uuid
(throw (ex-info "Missing graph UUID" {:page-uuid page-uuid})))
(let [cached (get-cache graph-uuid page-uuid)
headers (cond-> {}
(and cached (.-etag cached))
(assoc "if-none-match" (.-etag cached)))]
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" graph-uuid "/" page-uuid
"/transit")
headers)]
(if (= 304 (:status resp))
resp
(let [data (js->clj (:data resp) :keywordize-keys true)]
{:status 200 :data data}))))))
(defn <get-published-transit
"Fetch the published transit blob and return its text body.
If the metadata is unchanged, returns {:status 304}.
"
[page-uuid]
(p/let [meta-resp (<get-page-meta page-uuid)]
(if (= 304 (:status meta-resp))
meta-resp
(p/let [url-resp (<get-transit-url page-uuid)]
(if (= 304 (:status url-resp))
url-resp
(let [url (get-in url-resp [:data :url])]
(p/let [resp (js/fetch url)]
(if (.-ok resp)
(p/let [text (.text resp)]
{:status 200
:etag (get-in url-resp [:data :etag])
:body text})
(p/let [body (.text resp)]
(throw (ex-info "Publish transit fetch failed"
{:status (.-status resp) :body body})))))))))))
([page-uuid]
(<get-published-transit page-uuid (get-graph-uuid)))
([page-uuid graph-uuid]
(p/let [meta-resp (<get-page-meta page-uuid graph-uuid)]
(if (= 304 (:status meta-resp))
meta-resp
(p/let [url-resp (<get-transit-url page-uuid graph-uuid)]
(if (= 304 (:status url-resp))
url-resp
(let [url (get-in url-resp [:data :url])]
(p/let [resp (js/fetch url)]
(if (.-ok resp)
(p/let [text (.text resp)]
{:status 200
:etag (get-in url-resp [:data :etag])
:body text})
(p/let [body (.text resp)]
(throw (ex-info "Publish transit fetch failed"
{:status (.-status resp) :body body}))))))))))))
(defn get-graph-uuid
"Returns the RTC graph UUID if available."

View File

@@ -0,0 +1,80 @@
(ns frontend.publish.ssr
"SSR entry for published pages."
(:require ["react" :as react]
["react-dom/server" :as react-dom-server]
[datascript.core :as d]
[frontend.components.page :as page]
[frontend.db.conn-state :as conn-state]
[frontend.state :as state]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]
[rum.core :as rum]))
(def ^:private minimal-css
  "Inline fallback stylesheet for server-rendered publish pages."
  (apply str
         ["html,body{margin:0;padding:0;background:#fff;color:#111;}"
          "body{font-family:Inter,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Helvetica,Arial,sans-serif;}"
          ".cp__page-inner-wrap{max-width:900px;margin:0 auto;padding:32px 24px;}"
          ".page-inner{gap:24px;}"
          ".page-title{font-size:28px;font-weight:600;margin:0;}"
          ".ls-page-blocks{margin-top:16px;}"
          ".ls-block{margin:6px 0;}"
          ".block-content{line-height:1.6;}"
          "a{color:#2563eb;text-decoration:none;}"
          "a:hover{text-decoration:underline;}"]))
(defn- ensure-global-stubs!
  "Install minimal browser-like globals on js/globalThis so browser-oriented
  rendering code can run inside a worker/SSR runtime. Each global is only
  set when absent, so a real browser environment is left untouched."
  []
  (let [g js/globalThis]
    ;; Some rendering code expects React / ReactDOMServer as globals.
    (when-not (.-React g)
      (set! (.-React g) react))
    (when-not (.-ReactDOMServer g)
      (set! (.-ReactDOMServer g) react-dom-server))
    ;; `window` aliases the global object itself.
    (when-not (.-window g)
      (set! (.-window g) g))
    ;; Stub `document` answers the DOM lookups used during render with nothing.
    (when-not (.-document g)
      (set! (.-document g)
            #js {:getElementById (fn [_] nil)
                 :getElementsByClassName (fn [_] #js [])
                 :querySelector (fn [_] nil)}))
    (when-not (.-navigator g)
      (set! (.-navigator g) #js {:userAgent ""}))))
(defn- prepare-state!
  "Point the frontend's global state at `repo`/`conn` before rendering:
  registers the datascript connection under `repo`, marks it the current
  repo, and seeds `state/state` with a :page route match and an empty
  per-repo config. NOTE(review): mutates shared app state — assumes one
  render per process; confirm it is not reused across concurrent renders."
  [repo conn]
  (swap! conn-state/conns assoc repo conn)
  (state/set-current-repo! repo)
  (swap! state/state merge
         {:git/current-repo repo
          :route-match {:data {:name :page}}
          :config {repo {}}}))
(defn- escape-html
  "Escape the characters that are unsafe to interpolate into HTML text."
  [s]
  (apply str
         (map (fn [c]
                (case c
                  \& "&amp;"
                  \< "&lt;"
                  \> "&gt;"
                  \" "&quot;"
                  c))
              (str s))))

(defn- render-page-html
  "Render a complete HTML document for the page identified by `page-uuid`
  in datascript db `db`. The block markup comes from rum's static render
  (already escaped by React); the title is escaped here before being
  interpolated into <title>, since a raw title containing <, & or \"
  would break or inject into the document."
  [db page-uuid]
  (let [entity (d/entity db [:block/uuid page-uuid])
        title (or (:block/title entity) "Logseq Publish")
        body (rum/render-static-markup
              [:div#root
               (page/page-inner {:page entity :repo (state/get-current-repo)})])]
    (str "<!doctype html>"
         "<html><head><meta charset=\"utf-8\"/>"
         "<meta name=\"viewport\" content=\"width=device-width,initial-scale=1\"/>"
         "<title>" (escape-html title) "</title>"
         "<style>" minimal-css "</style>"
         "</head><body>"
         body
         "</body></html>")))
(defn ^:export render-page
  "Render a published page HTML string from transit payload and page uuid string.
  Assumes `transit-str` decodes (via ldb/read-transit-str) to a map with
  `:datoms` and the graph name under `:publish/graph` — confirm against the
  publisher producing the payload. Builds a fresh datascript conn from the
  datoms, installs SSR globals and app state, then renders the page."
  [transit-str page-uuid-str]
  (ensure-global-stubs!)
  (let [payload (ldb/read-transit-str transit-str)
        datoms (:datoms payload)
        repo (:publish/graph payload)
        conn (d/conn-from-datoms datoms db-schema/schema)
        page-uuid (uuid page-uuid-str)]
    (prepare-state! repo conn)
    (render-page-html @conn page-uuid)))

;; Expose the renderer to the host (worker) as a plain JS global.
(set! (.-logseqPublishRender js/globalThis) render-page)

View File

@@ -0,0 +1,9 @@
(ns frontend.components.lazy-editor
"SSR stub for code editor."
(:require [rum.core :as rum]))
;; Server-side placeholder for the lazily loaded code editor: instead of
;; mounting an editor widget it renders the code verbatim in a wrapped <pre>.
;; Falls back to an empty string when no code is given.
(rum/defc editor
  [_config _id _attr code _options]
  [:pre.code-editor
   {:style {:white-space "pre-wrap"}}
   (if code code "")])