node.js adapter

This commit is contained in:
Tienson Qin
2026-01-30 22:51:44 +08:00
parent 595957ef05
commit b8445a6395
29 changed files with 1197 additions and 11 deletions

1
.gitignore vendored
View File

@@ -79,3 +79,4 @@ clj-e2e/.wally
clj-e2e/e2e-dump
.dir-locals.el
.projectile
deps/db-sync/data

77
deps/db-sync/README.md vendored Normal file
View File

@@ -0,0 +1,77 @@
# Logseq DB Sync (deps/db-sync)
This package contains the DB sync server code and tests used by Logseq.
It includes the Cloudflare Worker implementation and a Node.js adapter for self-hosting.
## Requirements
- Node.js (see repo root for required version)
- Clojure (for shadow-cljs builds)
## Build and Test
### Cloudflare Worker
```bash
cd deps/db-sync
yarn watch
# open another terminal
cd deps/db-sync/worker
wrangler dev
```
### Node.js Adapter (self-hosted)
Build the adapter:
```bash
cd deps/db-sync
npm run build:node-adapter
```
Run the adapter with Cognito auth:
```bash
./start.sh
```
Run the adapter with a static token (local dev):
```bash
export DB_SYNC_AUTH_DRIVER=static
export DB_SYNC_AUTH_TOKEN=dev-token
export DB_SYNC_STATIC_USER_ID=user-1
export DB_SYNC_PORT=8787
node worker/dist/node-adapter.js
```
### Tests
Run the db-sync test suite (the Node adapter tests are included; `npm test` runs the same command):
```bash
cd deps/db-sync
npm run test:node-adapter
```
## Environment Variables
| Variable | Purpose |
| --- | --- |
| DB_SYNC_PORT | HTTP server port |
| DB_SYNC_BASE_URL | External base URL for asset links |
| DB_SYNC_DATA_DIR | Data directory for sqlite + assets |
| DB_SYNC_STORAGE_DRIVER | Storage backend selection (sqlite) |
| DB_SYNC_ASSETS_DRIVER | Assets backend selection (filesystem) |
| DB_SYNC_AUTH_DRIVER | Auth driver (cognito, static, none) |
| DB_SYNC_AUTH_TOKEN | Static token for local dev |
| DB_SYNC_STATIC_USER_ID | Static user id for local dev |
| DB_SYNC_STATIC_EMAIL | Static user email for local dev |
| DB_SYNC_STATIC_USERNAME | Static username for local dev |
| COGNITO_ISSUER | Cognito issuer URL |
| COGNITO_CLIENT_ID | Cognito client id |
| COGNITO_JWKS_URL | Cognito JWKS URL |
## Notes
- Protocol definitions live in `docs/agent-guide/db-sync/protocol.md`.
- DB sync implementation guide is in `docs/agent-guide/db-sync/db-sync-guide.md`.

View File

@@ -6,12 +6,18 @@
"dev": "cd ./worker && npx wrangler dev",
"watch": "clojure -M:cljs watch db-sync",
"release": "clojure -M:cljs release db-sync",
"build:node-adapter": "clojure -M:cljs release db-sync-node",
"dev:node-adapter": "clojure -M:cljs watch db-sync-node",
"start:node-adapter": "node worker/dist/node-adapter.js",
"test:node-adapter": "clojure -M:cljs compile db-sync-test && node worker/dist/worker-test.js",
"test": "clojure -M:cljs compile db-sync-test && node worker/dist/worker-test.js",
"clean": "rm -rf ./worker/dist/",
"deploy-prod": "yarn clean && yarn release && cd ./worker && wrangler deploy --env prod",
"deploy-staging": "yarn clean && yarn release && cd ./worker && wrangler deploy --env staging"
},
"dependencies": {
"better-sqlite3": "^11.10.0",
"ws": "^8.18.3",
"shadow-cljs": "^3.3.4"
}
}

View File

@@ -10,6 +10,10 @@
:js-options {:js-provider :import}
:closure-defines {shadow.cljs.devtools.client.env/enabled false}
:devtools {:enabled false}}
:db-sync-node {:target :node-script
:output-to "worker/dist/node-adapter.js"
:main logseq.db-sync.node.entry/main
:devtools {:enabled false}}
:db-sync-test {:target :node-test
:output-to "worker/dist/worker-test.js"
:devtools {:enabled false}

View File

@@ -1,5 +1,6 @@
(ns logseq.db-sync.common
(:require [clojure.string :as string]
[logseq.db-sync.platform.core :as platform]
[logseq.db.sqlite.util :as sqlite-util]
[promesa.core :as p]))
@@ -14,7 +15,7 @@
(defn json-response
([data] (json-response data 200))
([data status]
(js/Response.
(platform/response
(js/JSON.stringify (clj->js data))
#js {:status status
:headers (js/Object.assign
@@ -22,7 +23,7 @@
(cors-headers))})))
(defn options-response []
(js/Response. nil #js {:status 204 :headers (cors-headers)}))
(platform/response nil #js {:status 204 :headers (cors-headers)}))
(defn bad-request [message]
(json-response {:error message} 400))

View File

@@ -0,0 +1,58 @@
(ns logseq.db-sync.node.assets
  "Filesystem-backed asset storage for the Node.js adapter.
   Exposes an object with get/put/delete methods shaped like the Cloudflare
   R2 bucket binding the worker asset handlers use, so those handlers run
   unchanged. HTTP/custom metadata for each object is persisted in a sibling
   `<file>.meta.json` sidecar."
  (:require ["fs" :as fs]
            ["path" :as node-path]
            [promesa.core :as p]))

(defn- ensure-dir!
  ;; Create `dir` and any missing parents; no-op when it already exists.
  [dir]
  (.mkdirSync fs dir #js {:recursive true}))

(defn- meta-path
  ;; Path of the JSON metadata sidecar for the asset stored at `path`.
  [path]
  (str path ".meta.json"))

(defn- read-meta
  ;; Read and parse the metadata sidecar; the promise rejects when missing.
  [path]
  (p/let [content (.readFile (.-promises fs) path "utf8")]
    (js/JSON.parse content)))

(defn- write-meta!
  ;; Serialize `meta` as JSON and write it to `path` (returns a promise).
  [path meta]
  (.writeFile (.-promises fs) path (js/JSON.stringify meta) "utf8"))

(defn- normalize-bytes
  ;; Coerce `data` (Uint8Array, ArrayBuffer, or array-like) to a Uint8Array
  ;; suitable for fs.writeFile.
  [data]
  (cond
    (instance? js/Uint8Array data) data
    (instance? js/ArrayBuffer data) (js/Uint8Array. data)
    :else (js/Uint8Array. data)))

(defn make-bucket
  "Returns an R2-like bucket rooted at `base-dir`.
   - :get    -> promise of #js {:body :httpMetadata :customMetadata}, or nil
                when the object cannot be read (a missing sidecar yields
                empty metadata rather than a failure)
   - :put    -> writes the object bytes plus metadata sidecar, creating
                intermediate directories; resolves to #js {:ok true}
   - :delete -> removes the object and its sidecar, ignoring missing files"
  [base-dir]
  (ensure-dir! base-dir)
  #js {:get (fn [key]
              (let [file-path (node-path/join base-dir key)
                    meta-file (meta-path file-path)]
                (p/catch
                 (p/let [buf (.readFile (.-promises fs) file-path)
                         ;; tolerate a missing/corrupt sidecar: treat as no metadata
                         meta (p/catch (read-meta meta-file) (fn [_] #js {}))]
                   #js {:body buf
                        :httpMetadata #js {:contentType (aget meta "contentType")
                                           :contentEncoding (aget meta "contentEncoding")
                                           :cacheControl (aget meta "cacheControl")}
                        :customMetadata (aget meta "customMetadata")})
                 ;; any read failure (e.g. ENOENT) maps to nil, mirroring a miss
                 (fn [_] nil))))
       :put (fn [key body opts]
              (let [file-path (node-path/join base-dir key)
                    meta-file (meta-path file-path)
                    dir (node-path/dirname file-path)
                    data (normalize-bytes body)
                    metadata (or (aget opts "httpMetadata") #js {})
                    custom (or (aget opts "customMetadata") #js {})]
                (ensure-dir! dir)
                (p/let [_ (.writeFile (.-promises fs) file-path data)
                        _ (write-meta! meta-file #js {:contentType (aget metadata "contentType")
                                                      :contentEncoding (aget metadata "contentEncoding")
                                                      :cacheControl (aget metadata "cacheControl")
                                                      :customMetadata custom})]
                  #js {:ok true})))
       :delete (fn [key]
                 (let [file-path (node-path/join base-dir key)
                       meta-file (meta-path file-path)]
                   (p/let [_ (p/catch (.unlink (.-promises fs) file-path) (fn [_] nil))
                           _ (p/catch (.unlink (.-promises fs) meta-file) (fn [_] nil))]
                     #js {:ok true})))})

View File

@@ -0,0 +1,49 @@
(ns logseq.db-sync.node.config
  "Configuration for the Node.js db-sync adapter. Effective config is
   defaults, overlaid with environment variables, overlaid with explicit
   overrides; driver selections are validated and lowercased."
  (:require [clojure.string :as string]))

(defn- env-value
  ;; Value of env var `k`, or nil when unset or blank.
  [^js env k]
  (let [v (aget env k)]
    (when (seq v) v)))

(defn- parse-int
  ;; Base-10 integer parse with a fallback for unparsable input.
  [v default]
  (let [n (js/parseInt v 10)]
    (if (js/isNaN n) default n)))

(defn config-from-env
  "Reads adapter settings from process.env. Keys whose variable is unset are
   present with a nil value (callers should strip nils before merging)."
  []
  (let [env (.-env js/process)]
    {:port (when-let [v (env-value env "DB_SYNC_PORT")] (parse-int v 8080))
     :base-url (env-value env "DB_SYNC_BASE_URL")
     :data-dir (or (env-value env "DB_SYNC_DATA_DIR") "data/db-sync")
     :storage-driver (or (env-value env "DB_SYNC_STORAGE_DRIVER") "sqlite")
     :assets-driver (or (env-value env "DB_SYNC_ASSETS_DRIVER") "filesystem")
     :auth-driver (or (env-value env "DB_SYNC_AUTH_DRIVER") "cognito")
     :auth-token (env-value env "DB_SYNC_AUTH_TOKEN")
     :static-user-id (env-value env "DB_SYNC_STATIC_USER_ID")
     :static-email (env-value env "DB_SYNC_STATIC_EMAIL")
     :static-username (env-value env "DB_SYNC_STATIC_USERNAME")
     :log-level (or (env-value env "DB_SYNC_LOG_LEVEL") "info")
     :cognito-issuer (env-value env "COGNITO_ISSUER")
     :cognito-client-id (env-value env "COGNITO_CLIENT_ID")
     :cognito-jwks-url (env-value env "COGNITO_JWKS_URL")}))

(defn- strip-nils
  ;; Remove nil-valued entries so they don't clobber earlier maps in merge.
  [m]
  (into {} (remove (comp nil? val)) m))

(defn normalize-config
  "Builds the effective config from defaults, environment and `overrides`
   (later sources win). Nil values from env/overrides are dropped first:
   previously an unset DB_SYNC_PORT produced {:port nil}, whose nil clobbered
   the default port during merge (the server then listened on an arbitrary
   port). Throws js/Error on an unsupported auth, storage, or assets driver."
  [overrides]
  (let [defaults {:port 8080
                  :data-dir "data/db-sync"
                  :storage-driver "sqlite"
                  :assets-driver "filesystem"
                  :auth-driver "cognito"
                  :log-level "info"}
        merged (merge defaults
                      (strip-nils (config-from-env))
                      (strip-nils overrides))
        auth-driver (string/lower-case (:auth-driver merged))
        storage-driver (string/lower-case (:storage-driver merged))
        assets-driver (string/lower-case (:assets-driver merged))]
    (when-not (#{"cognito" "static" "none"} auth-driver)
      (throw (js/Error. (str "unsupported auth driver: " auth-driver))))
    (when-not (#{"sqlite"} storage-driver)
      (throw (js/Error. (str "unsupported storage driver: " storage-driver))))
    (when-not (#{"filesystem"} assets-driver)
      (throw (js/Error. (str "unsupported assets driver: " assets-driver))))
    (assoc merged
           :auth-driver auth-driver
           :storage-driver storage-driver
           :assets-driver assets-driver)))

View File

@@ -0,0 +1,60 @@
(ns logseq.db-sync.node.dispatch
  "HTTP request routing for the Node.js adapter. Mirrors the Cloudflare
   worker dispatch: health/index/e2ee/asset routes are handled directly,
   while /sync/<graph-id>/... requests are access-checked and forwarded to
   the per-graph sync handler with a rewritten URL."
  (:require [clojure.string :as string]
            [logseq.db-sync.common :as common]
            [logseq.db-sync.node.graph :as graph]
            [logseq.db-sync.node.routes :as node-routes]
            [logseq.db-sync.platform.core :as platform]
            [logseq.db-sync.worker.handler.assets :as assets-handler]
            [logseq.db-sync.worker.handler.index :as index-handler]
            [logseq.db-sync.worker.handler.sync :as sync-handler]
            [logseq.db-sync.worker.http :as http]
            [promesa.core :as p]))

(defn handle-node-fetch
  "Routes `request` to the matching handler and returns a Response (or a
   promise of one).
   Map keys:
   - :request  platform Request object
   - :env      env object carrying the DB / auth / asset bindings
   - :registry atom of graph-id -> graph context
   - :deps     dependencies used to lazily create graph contexts"
  [{:keys [request env registry deps]}]
  (let [url (platform/request-url request)
        path (.-pathname url)
        method (.-method request)
        ;; shim for the worker index handler, which reads .env and .d1
        index-self #js {:env env :d1 (aget env "DB")}]
    (cond
      (= path "/health")
      (http/json-response :worker/health {:ok true})

      ;; graph index + membership endpoints
      (or (= path "/graphs")
          (string/starts-with? path "/graphs/"))
      (index-handler/handle-fetch index-self request)

      (string/starts-with? path "/e2ee")
      (index-handler/handle-fetch index-self request)

      ;; assets: preflight passes straight through; other methods require
      ;; access to the graph the asset path names
      (string/starts-with? path "/assets/")
      (if (= method "OPTIONS")
        (assets-handler/handle request env)
        (if-let [{:keys [graph-id]} (assets-handler/parse-asset-path path)]
          (p/let [access-resp (index-handler/graph-access-response request env graph-id)]
            (if (.-ok access-resp)
              (assets-handler/handle request env)
              access-resp))
          (http/bad-request "invalid asset path")))

      ;; generic CORS preflight for everything below. NOTE(review): this also
      ;; catches OPTIONS for /sync/ paths, so the inner OPTIONS check in the
      ;; /sync branch below is unreachable (same response either way).
      (= method "OPTIONS")
      (common/options-response)

      ;; sync traffic: verify access, then forward to the graph context with
      ;; the path tail and a graph-id query param, mirroring the worker dispatch
      (string/starts-with? path "/sync/")
      (if-let [{:keys [graph-id tail]} (node-routes/parse-sync-path path)]
        (if (seq graph-id)
          (if (= method "OPTIONS")
            (common/options-response)
            (p/let [access-resp (index-handler/graph-access-response request env graph-id)]
              (if (.-ok access-resp)
                (let [ctx (graph/get-or-create-graph registry deps graph-id)
                      ;; rebuild the URL without the /sync/<graph-id> prefix
                      new-url (js/URL. (str (.-origin url) tail (.-search url)))]
                  (.set (.-searchParams new-url) "graph-id" graph-id)
                  (let [rewritten (platform/request (.toString new-url) request)]
                    (sync-handler/handle-http ctx rewritten)))
                access-resp)))
          (http/bad-request "missing graph id"))
        (http/bad-request "missing graph id"))

      :else
      (http/not-found))))

View File

@@ -0,0 +1,16 @@
(ns logseq.db-sync.node.entry
  "Executable entry point for the Node.js db-sync adapter."
  (:require [logseq.db-sync.node.config :as config]
            [logseq.db-sync.node.server :as server]
            [promesa.core :as p]))

;; Holds the running server handle so tooling/REPL sessions can reach it.
(defonce ^:private *server (atom nil))

(defn main
  "Normalizes configuration, starts the server, and logs the outcome.
   On success the started-server map is stashed in `*server`."
  [& _args]
  (let [cfg (config/normalize-config {})
        on-started (fn [{:keys [port] :as result}]
                     (reset! *server result)
                     (js/console.log (str "Logseq sync listening on port " port)))
        on-failed (fn [error]
                    (js/console.error "Logseq sync failed to start" error))]
    (js/console.log "Starting Logseq sync...")
    (-> (server/start! cfg)
        (p/then on-started)
        (p/catch on-failed))))

View File

@@ -0,0 +1,45 @@
(ns logseq.db-sync.node.graph
  "Per-graph runtime contexts for the Node.js adapter. Each context bundles
   the graph's sqlite handle, a websocket registry, and an env object shaped
   like the Cloudflare worker bindings."
  (:require [logseq.db-sync.node.storage :as storage]))

(defn- make-state
  ;; Durable-Object-like state: tracks this graph's websockets so handlers
  ;; written against state.getWebSockets() keep working on Node.
  []
  (let [sockets (atom #{})]
    #js {:getWebSockets (fn [] (to-array @sockets))
         :addWebSocket (fn [ws] (swap! sockets conj ws))
         :removeWebSocket (fn [ws] (swap! sockets disj ws))}))

(defn- env-object
  ;; Worker-style env bindings (index DB, asset bucket, auth vars) built
  ;; from the adapter config.
  [cfg index-db assets-bucket]
  (doto (js-obj)
    (aset "DB" index-db)
    (aset "LOGSEQ_SYNC_ASSETS" assets-bucket)
    (aset "DB_SYNC_AUTH_DRIVER" (:auth-driver cfg))
    (aset "DB_SYNC_AUTH_TOKEN" (:auth-token cfg))
    (aset "DB_SYNC_STATIC_USER_ID" (:static-user-id cfg))
    (aset "DB_SYNC_STATIC_EMAIL" (:static-email cfg))
    (aset "DB_SYNC_STATIC_USERNAME" (:static-username cfg))
    (aset "COGNITO_ISSUER" (:cognito-issuer cfg))
    (aset "COGNITO_CLIENT_ID" (:cognito-client-id cfg))
    (aset "COGNITO_JWKS_URL" (:cognito-jwks-url cfg))))

(defn graph-context
  "Creates a fresh context for `graph-id`: opens its sqlite database under
   the configured data dir and pairs it with websocket state and env
   bindings. :conn and :schema-ready start empty — presumably populated
   later by the sync handlers (TODO confirm)."
  [{:keys [config index-db assets-bucket]} graph-id]
  (let [{:keys [sql]} (storage/open-graph-db (:data-dir config) graph-id)
        state (make-state)
        env (env-object config index-db assets-bucket)]
    #js {:state state
         :env env
         :sql sql
         :conn nil
         :schema-ready false}))

(defn get-or-create-graph
  "Returns the cached context for `graph-id` from `registry` (an atom of
   graph-id -> ctx), creating and caching one on first use. The check-then-
   create is not atomic, which is fine on Node's single-threaded event loop."
  [registry deps graph-id]
  (or (get @registry graph-id)
      (let [ctx (graph-context deps graph-id)]
        (swap! registry assoc graph-id ctx)
        ctx)))

(defn close-graphs!
  "Closes every cached graph's sqlite handle. Entries are not removed from
   the registry; callers are expected to discard it afterwards."
  [registry]
  (doseq [[_ ^js ctx] @registry]
    (when-let [^js sql (.-sql ctx)]
      (when-let [close (.-close sql)]
        (close)))))

View File

@@ -0,0 +1,16 @@
(ns logseq.db-sync.node.routes
  "URL path parsing helpers for the Node.js adapter."
  (:require [clojure.string :as string]))

(defn parse-sync-path
  "Parses \"/sync/<graph-id>[/tail]\" into {:graph-id ... :tail ...}.
   Returns nil when `path` is not under /sync/. A doubled slash after the
   prefix is tolerated, and :tail always begins with \"/\" (it defaults to
   \"/\" when the path has no tail)."
  [path]
  (when (string/starts-with? path "/sync/")
    (let [stripped (subs path (count "/sync/"))
          remainder (if (string/starts-with? stripped "/")
                      (subs stripped 1)
                      stripped)
          idx (string/index-of remainder "/")]
      (if (nil? idx)
        {:graph-id remainder :tail "/"}
        {:graph-id (subs remainder 0 idx)
         :tail (subs remainder idx)}))))

View File

@@ -0,0 +1,143 @@
(ns logseq.db-sync.node.server
  "HTTP + WebSocket server for the self-hosted Node.js db-sync adapter.
   Wires the platform-neutral worker handlers onto Node's http server and
   the `ws` package, with per-graph contexts kept in a registry atom."
  (:require ["http" :as http]
            ["path" :as node-path]
            ["ws" :as ws]
            [lambdaisland.glogi :as log]
            [lambdaisland.glogi.console :as glogi-console]
            [logseq.db-sync.index :as index]
            [logseq.db-sync.node.assets :as assets]
            [logseq.db-sync.node.config :as config]
            [logseq.db-sync.node.dispatch :as dispatch]
            [logseq.db-sync.node.graph :as graph]
            [logseq.db-sync.node.routes :as node-routes]
            [logseq.db-sync.node.storage :as storage]
            [logseq.db-sync.platform.core :as platform]
            [logseq.db-sync.platform.node :as platform-node]
            [logseq.db-sync.worker.auth :as auth]
            [logseq.db-sync.worker.handler.ws :as ws-handler]
            [logseq.db-sync.worker.presence :as presence]
            [promesa.core :as p]))

;; Route glogi logging to the console for the standalone process.
(glogi-console/install!)

(defn- make-env
  ;; Worker-style env bindings built from the adapter config (same shape as
  ;; logseq.db-sync.node.graph's env object).
  [cfg index-db assets-bucket]
  (doto (js-obj)
    (aset "DB" index-db)
    (aset "LOGSEQ_SYNC_ASSETS" assets-bucket)
    (aset "DB_SYNC_AUTH_DRIVER" (:auth-driver cfg))
    (aset "DB_SYNC_AUTH_TOKEN" (:auth-token cfg))
    (aset "DB_SYNC_STATIC_USER_ID" (:static-user-id cfg))
    (aset "DB_SYNC_STATIC_EMAIL" (:static-email cfg))
    (aset "DB_SYNC_STATIC_USERNAME" (:static-username cfg))
    (aset "COGNITO_ISSUER" (:cognito-issuer cfg))
    (aset "COGNITO_CLIENT_ID" (:cognito-client-id cfg))
    (aset "COGNITO_JWKS_URL" (:cognito-jwks-url cfg))))

(defn- access-allowed?
  ;; Promise of whether the request's authenticated user may use `graph-id`.
  ;; Missing/invalid credentials resolve to false rather than rejecting.
  [env graph-id request]
  (p/let [claims (auth/auth-claims request env)
          user-id (when claims (aget claims "sub"))
          db (aget env "DB")]
    (if (string? user-id)
      (index/<user-has-access-to-graph? db graph-id user-id)
      false)))

(defn- attach-ws!
  ;; Register the socket with the graph's state and stub out the attachment
  ;; API that Cloudflare websockets provide, in case handlers call it.
  [^js ctx ^js socket]
  (let [state (.-state ctx)]
    (when-let [add-ws (.-addWebSocket state)]
      (add-ws socket))
    (set! (.-serializeAttachment socket) (fn [_] nil))
    (set! (.-deserializeAttachment socket) (fn [] nil))))

(defn- detach-ws!
  ;; Remove the socket from the graph's state.
  [^js ctx ^js socket]
  (let [state (.-state ctx)]
    (when-let [remove-ws (.-removeWebSocket state)]
      (remove-ws socket))))

(defn- handle-ws-connection
  ;; Set up presence plus message/close/error handling for a freshly
  ;; upgraded socket. Incoming frames are decoded to text and handed to the
  ;; shared worker ws handler; handler errors are logged and reported to the
  ;; client as a generic error message.
  [ctx env request ^js socket]
  ;; NOTE(review): this presence setup is fire-and-forget — the .on handlers
  ;; below are registered before the promise settles; confirm ordering is ok.
  (p/let [claims (auth/auth-claims request env)
          user (presence/claims->user claims)]
    (when user
      (presence/add-presence! ctx socket user))
    (presence/broadcast-online-users! ctx))
  (.on socket "message"
       (fn [data]
         (let [text (if (string? data) data (.toString data))]
           (try
             (ws-handler/handle-ws-message! ctx socket text)
             (catch :default e
               (log/error :db-sync/ws-error e)
               (.send socket (js/JSON.stringify #js {:type "error" :message "server error"})))))))
  (.on socket "close"
       (fn []
         (presence/remove-presence! ctx socket)
         (presence/broadcast-online-users! ctx)
         (detach-ws! ctx socket)))
  (.on socket "error"
       (fn [error]
         (presence/remove-presence! ctx socket)
         (presence/broadcast-online-users! ctx)
         (detach-ws! ctx socket)
         (log/error :db-sync/ws-error {:error error}))))

(defn start!
  "Starts the adapter. `overrides` are merged over env/default config (see
   logseq.db-sync.node.config/normalize-config).
   Initializes the index DB schema, installs an `upgrade` handler that
   access-checks /sync/<graph-id> websocket requests, then listens on the
   configured port (0 lets the OS pick a free one).
   Resolves to {:server :wss :env :registry :port :base-url :stop!}; :stop!
   closes the graph DBs, the index DB, and the http server, returning a
   promise that settles when the server has closed."
  [overrides]
  (let [cfg (config/normalize-config overrides)
        index-db (storage/open-index-db (:data-dir cfg))
        assets-bucket (assets/make-bucket (node-path/join (:data-dir cfg) "assets"))
        env (make-env cfg index-db assets-bucket)
        registry (atom {})
        deps {:config cfg
              :index-db index-db
              :assets-bucket assets-bucket}
        ;; bridge each Node request to a fetch-style Request, dispatch it,
        ;; and stream the Response back out
        server (.createServer http
                              (fn [req res]
                                (p/let [request (platform-node/request-from-node req {:scheme "http"})
                                        response (dispatch/handle-node-fetch {:request request
                                                                             :env env
                                                                             :registry registry
                                                                             :deps deps})]
                                  (platform-node/send-response! res response))))
        ;; `ws` renamed its export across versions: WebSocketServer vs Server
        WSS (or (.-WebSocketServer ws) (.-Server ws))
        ^js wss (new WSS #js {:noServer true})]
    (.on server "error" (fn [error] (log/error :db-sync/node-server-error {:error error})))
    (.on wss "error" (fn [error] (log/error :db-sync/node-ws-error {:error error})))
    (p/let [_ (index/<index-init! index-db)]
      ;; websocket upgrades are only honored for authorized /sync/<graph-id>
      ;; paths; anything else destroys the raw socket
      (.on server "upgrade"
           (fn [req ^js socket head]
             (let [request (platform-node/request-from-node req {:scheme "http"})
                   url (platform/request-url request)
                   path (.-pathname url)
                   parsed (node-routes/parse-sync-path path)
                   graph-id (:graph-id parsed)]
               (if (and graph-id (seq graph-id))
                 (p/let [allowed? (access-allowed? env graph-id request)]
                   (if allowed?
                     (.handleUpgrade wss req socket head
                                     (fn [ws-socket]
                                       (let [ctx (graph/get-or-create-graph registry deps graph-id)]
                                         (attach-ws! ctx ws-socket)
                                         (handle-ws-connection ctx env request ws-socket))))
                     (.destroy socket)))
                 (.destroy socket)))))
      (p/let [_ (js/Promise.
                 (fn [resolve]
                   (.listen server (:port cfg)
                            (fn [] (resolve nil)))))
              ;; server.address() is a string for pipes, an object for TCP;
              ;; read the actual bound port (matters when :port is 0)
              address (.address server)
              port (if (number? address) address (.-port address))
              base-url (or (:base-url cfg) (str "http://localhost:" port))]
        {:server server
         :wss wss
         :env env
         :registry registry
         :port port
         :base-url base-url
         ;; NOTE(review): wss itself is never closed; its sockets die with
         ;; the http server — confirm that is sufficient cleanup.
         :stop! (fn []
                  (graph/close-graphs! registry)
                  (when-let [close (.-close index-db)]
                    (close))
                  (js/Promise.
                   (fn [resolve]
                     (.close server (fn [] (resolve nil))))))}))))

View File

@@ -0,0 +1,50 @@
(ns logseq.db-sync.node.storage
  "better-sqlite3-backed storage for the Node.js adapter. Wraps database
   handles in an object exposing the exec/prepare/close surface the shared
   sqlite code expects."
  (:require ["better-sqlite3" :as sqlite3]
            ["fs" :as fs]
            ["path" :as node-path]
            [clojure.string :as string]
            [logseq.db.common.sqlite :as common-sqlite]))

;; Under nbb the CommonJS module is reached through its `default` export.
(def sqlite (if (find-ns 'nbb.core) (aget sqlite3 "default") sqlite3))

(defn- ensure-dir!
  ;; Create `dir` and any missing parents; no-op when it already exists.
  [dir]
  (.mkdirSync fs dir #js {:recursive true}))

(defn- normalize-sql
  ;; Trimmed, lowercased SQL used only for statement-type sniffing.
  [sql]
  (-> sql string/trim string/lower-case))

(defn- select-sql?
  ;; True when the statement should return rows.
  ;; NOTE(review): only literal SELECT is detected — statements starting
  ;; with WITH or PRAGMA would not return rows through :exec; confirm
  ;; callers never need that.
  [sql]
  (string/starts-with? (normalize-sql sql) "select"))

(defn- exec-with-args
  ;; stmt.run(...args): positional bind parameters, no result rows.
  [^js stmt args]
  (.apply (.-run stmt) stmt (to-array args)))

(defn- all-with-args
  ;; stmt.all(...args): positional bind parameters, returns all rows.
  [^js stmt args]
  (.apply (.-all stmt) stmt (to-array args)))

(defn- wrap-db
  ;; Uniform facade over a better-sqlite3 Database:
  ;; exec(sql, ...args) prepares and binds args (rows for SELECT, nil
  ;; otherwise); without args a SELECT returns rows and other SQL goes
  ;; through db.exec, which also supports multi-statement scripts.
  [^js db]
  #js {:exec (fn [sql & args]
               (if (seq args)
                 (let [stmt (.prepare db sql)]
                   (if (select-sql? sql)
                     (all-with-args stmt args)
                     (do
                       (exec-with-args stmt args)
                       nil)))
                 (if (select-sql? sql)
                   (.all (.prepare db sql))
                   (.exec db sql))))
       :prepare (fn [sql] (.prepare db sql))
       :close (fn [] (.close db))
       :_db db})

(defn open-index-db
  "Opens (creating if needed) the shared index database at
   `<data-dir>/index.sqlite`; returns the wrapped handle."
  [data-dir]
  (let [db-path (node-path/join data-dir "index.sqlite")]
    (ensure-dir! (node-path/dirname db-path))
    (wrap-db (new sqlite db-path nil))))

(defn open-graph-db
  "Opens the per-graph database for `graph-id` under `<data-dir>/graphs`,
   using the shared path helper to derive its location.
   Returns {:graph-name ... :sql <wrapped handle>}."
  [data-dir graph-id]
  (let [[graph-name db-path] (common-sqlite/get-db-full-path (node-path/join data-dir "graphs") graph-id)]
    (ensure-dir! (node-path/dirname db-path))
    {:graph-name graph-name
     :sql (wrap-db (new sqlite db-path nil))}))

View File

@@ -0,0 +1,6 @@
(ns logseq.db-sync.platform.cloudflare
  "Cloudflare platform bindings. The worker runtime provides the standard
   fetch-API globals, so these simply re-export the core constructors to
   keep call sites platform-neutral."
  (:require [logseq.db-sync.platform.core :as core]))

(def response core/response)
(def request core/request)
(def request-url core/request-url)

View File

@@ -0,0 +1,13 @@
(ns logseq.db-sync.platform.core
  "Platform-neutral constructors over the WHATWG fetch-API globals
   (Response/Request/URL), available in both Cloudflare workers and Node.")

(defn response
  "Builds a js/Response from `body` and an init object (status, headers, ...)."
  [body init]
  (js/Response. body init))

(defn request
  "Builds a js/Request from `url` and an init object (or another Request,
   whose method/headers/body are copied)."
  [url init]
  (js/Request. url init))

(defn request-url
  "Parses `request`'s URL string into a js/URL."
  [request]
  (js/URL. (.-url request)))

View File

@@ -0,0 +1,50 @@
(ns logseq.db-sync.platform.node
  "Bridges between Node's http primitives and fetch-API Request/Response
   objects so the platform-neutral handlers can run on a plain Node server."
  (:require [clojure.string :as string]
            [logseq.db-sync.platform.core :as core]
            [promesa.core :as p]))

(defn- headers->object
  ;; Convert a fetch-API Headers into a plain object usable by res.writeHead.
  ;; Headers.forEach invokes its callback with (value, key).
  [headers]
  (let [out (js-obj)]
    (.forEach headers (fn [value key] (aset out key value)))
    out))

(defn request-from-node
  "Builds a fetch-API Request from a Node IncomingMessage `req`.
   `scheme`/`host` default to \"http\" and the Host header (or localhost).
   Header names are lowercased. For non-GET/HEAD methods the IncomingMessage
   itself is passed as the streaming body with duplex \"half\", as required
   for streamed request bodies."
  [^js req {:keys [scheme host]}]
  (let [headers (js/Headers.)
        node-headers (.-headers req)
        header-keys (js/Object.keys node-headers)
        _ (doseq [k header-keys]
            (let [value (aget node-headers k)]
              (when (some? value)
                (.set headers (string/lower-case k) value))))
        method (or (.-method req) "GET")
        host (or host (aget node-headers "host") "localhost")
        scheme (or scheme "http")
        url (str scheme "://" host (.-url req))
        init #js {:method method
                  :headers headers}]
    (when-not (or (= method "GET") (= method "HEAD"))
      (aset init "body" req)
      (aset init "duplex" "half"))
    (core/request url init)))

(defn send-response!
  "Writes a fetch-API `response` out through Node's ServerResponse `res`.
   Streams the body via stream.Readable.fromWeb when available, otherwise
   buffers it with arrayBuffer. Returns a promise that resolves once the
   write has been issued."
  [^js res ^js response]
  (let [headers (headers->object (.-headers response))
        status (.-status response)]
    (.writeHead res status headers)
    (if-let [body (.-body response)]
      (let [^js stream (try
                         ;; Readable.fromWeb may be absent on older Node or
                         ;; when require is unavailable; fall back to buffering
                         (let [Readable (.-Readable (js/require "stream"))]
                           (when (and Readable (.-fromWeb Readable))
                             (.fromWeb Readable body)))
                         (catch :default _ nil))]
        (if stream
          (do
            (.pipe stream res)
            (js/Promise.resolve nil))
          (p/let [buf (.arrayBuffer response)]
            (.end res (js/Buffer.from buf)))))
      (do
        (.end res)
        (js/Promise.resolve nil)))))

View File

@@ -11,6 +11,26 @@
(let [url (js/URL. (.-url request))]
(.get (.-searchParams url) "token"))))
(defn- static-claims
  ;; Claims for the "static" auth driver: accepted only when the presented
  ;; token exactly equals DB_SYNC_AUTH_TOKEN. Identity fields come from the
  ;; DB_SYNC_STATIC_* env vars (user id defaults to "user").
  ;; NOTE(review): plain string comparison — fine for local dev, not
  ;; timing-safe.
  [env token]
  (let [expected (aget env "DB_SYNC_AUTH_TOKEN")
        user-id (or (aget env "DB_SYNC_STATIC_USER_ID") "user")
        email (aget env "DB_SYNC_STATIC_EMAIL")
        username (aget env "DB_SYNC_STATIC_USERNAME")]
    (when (and (string? expected) (string? token) (= expected token))
      (let [claims #js {"sub" user-id}]
        (when (string? email) (aset claims "email" email))
        (when (string? username) (aset claims "username" username))
        claims))))

(defn- none-claims
  ;; Claims for the "none" auth driver: no token check at all — every
  ;; request is attributed to the configured (or default) static identity.
  [env]
  (let [user-id (or (aget env "DB_SYNC_STATIC_USER_ID") "user")
        email (aget env "DB_SYNC_STATIC_EMAIL")
        username (aget env "DB_SYNC_STATIC_USERNAME")
        claims #js {"sub" user-id}]
    (when (string? email) (aset claims "email" email))
    (when (string? username) (aset claims "username" username))
    claims))
(defn- decode-jwt-part [part]
(let [pad (if (pos? (mod (count part) 4))
(apply str (repeat (- 4 (mod (count part) 4)) "="))
@@ -31,7 +51,15 @@
nil)))
(defn auth-claims [request env]
(let [token (token-from-request request)]
(if (string? token)
(.catch (authorization/verify-jwt token env) (fn [_] nil))
(js/Promise.resolve nil))))
(let [token (token-from-request request)
driver (some-> (aget env "DB_SYNC_AUTH_DRIVER") string/lower-case)]
(case driver
"static"
(js/Promise.resolve (static-claims env token))
"none"
(js/Promise.resolve (none-claims env))
(if (string? token)
(.catch (authorization/verify-jwt token env) (fn [_] nil))
(js/Promise.resolve nil)))))

View File

@@ -1,13 +1,14 @@
(ns logseq.db-sync.worker.dispatch
(:require [clojure.string :as string]
[logseq.db-sync.common :as common]
[logseq.db-sync.platform.core :as platform]
[logseq.db-sync.worker.handler.assets :as assets-handler]
[logseq.db-sync.worker.handler.index :as index-handler]
[logseq.db-sync.worker.http :as http]
[promesa.core :as p]))
(defn handle-worker-fetch [request ^js env]
(let [url (js/URL. (.-url request))
(let [url (platform/request-url request)
path (.-pathname url)
method (.-method request)]
(cond
@@ -58,7 +59,7 @@
(.fetch stub request)
(do
(.set (.-searchParams new-url) "graph-id" graph-id)
(let [rewritten (js/Request. (.toString new-url) request)]
(let [rewritten (platform/request (.toString new-url) request)]
(.fetch stub rewritten)))))
access-resp)))
(http/bad-request "missing graph id")))

11
deps/db-sync/start.sh vendored Executable file
View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Start the self-hosted Logseq DB sync Node.js adapter with Cognito auth.
# Each setting uses ${VAR:-default}, so values already exported in the
# caller's environment take precedence over the defaults below
# (previously the unconditional exports clobbered any caller-provided value).
set -euo pipefail

export COGNITO_JWKS_URL="${COGNITO_JWKS_URL:-https://cognito-idp.us-east-1.amazonaws.com/us-east-1_dtagLnju8/.well-known/jwks.json}"
export COGNITO_ISSUER="${COGNITO_ISSUER:-https://cognito-idp.us-east-1.amazonaws.com/us-east-1_dtagLnju8}"
export COGNITO_CLIENT_ID="${COGNITO_CLIENT_ID:-69cs1lgme7p8kbgld8n5kseii6}"

# Optional: choose a fixed port
export DB_SYNC_PORT="${DB_SYNC_PORT:-8787}"

node worker/dist/node-adapter.js

View File

@@ -0,0 +1,106 @@
(ns logseq.db-sync.node-adapter-test
  "End-to-end tests for the Node.js adapter: boots a real server on a random
   port with the static auth driver and exercises HTTP and websocket flows."
  (:require [cljs.test :refer [deftest is async testing]]
            [clojure.string :as string]
            [logseq.db-sync.node.server :as node-server]
            [logseq.db-sync.protocol :as protocol]
            [promesa.core :as p]))

;; Token accepted by the static auth driver for these tests.
(def test-token "test-token")

(defn- auth-headers
  ;; Bearer-token + JSON headers for every test request.
  []
  #js {"authorization" (str "Bearer " test-token)
       "content-type" "application/json"})

(defn- post-json
  ;; POST `body` (a CLJS map) as JSON to `url`; returns the fetch promise.
  [url body]
  (js/fetch url #js {:method "POST"
                     :headers (auth-headers)
                     :body (js/JSON.stringify (clj->js body))}))

(defn- get-json
  ;; Authenticated GET to `url`; returns the fetch promise.
  [url]
  (js/fetch url #js {:method "GET" :headers (auth-headers)}))

(defn- parse-json
  ;; Promise of the response's parsed JSON body.
  [resp]
  (.json resp))

(defn- start-test-server
  ;; Boot the adapter on port 0 (OS-assigned) with static auth and a unique
  ;; throwaway data directory per run.
  []
  (let [suffix (str (random-uuid))
        dir (str "tmp/db-sync-node-test/" suffix)]
    (node-server/start! {:port 0
                         :auth-driver "static"
                         :auth-token test-token
                         :static-user-id "user-1"
                         :data-dir dir})))

(deftest node-adapter-http-roundtrip-test
  ;; Full HTTP round trip: health -> create graph -> access check ->
  ;; sync health -> tx batch -> pull.
  (async done
    (p/let [{:keys [base-url stop!]} (start-test-server)
            health-resp (js/fetch (str base-url "/health"))
            health-body (parse-json health-resp)]
      (testing "health"
        (is (.-ok health-resp))
        (is (= true (aget health-body "ok"))))
      (p/let [create-resp (post-json (str base-url "/graphs") {:graph-name "Test Graph"})
              create-body (parse-json create-resp)
              graph-id (aget create-body "graph-id")
              access-resp (get-json (str base-url "/graphs/" graph-id "/access"))
              access-body (parse-json access-resp)
              sync-health (get-json (str base-url "/sync/" graph-id "/health"))
              sync-health-body (parse-json sync-health)]
        (testing "graph access"
          (is (.-ok create-resp))
          (is (string? graph-id))
          (is (.-ok access-resp))
          (is (= true (aget access-body "ok"))))
        (testing "sync health"
          (is (.-ok sync-health))
          (is (= true (aget sync-health-body "ok"))))
        (p/let [tx-data [{:block/uuid (random-uuid)
                          :block/content "hello"}]
                txs (protocol/tx->transit tx-data)
                tx-resp (post-json (str base-url "/sync/" graph-id "/tx/batch")
                                   {:t-before 0
                                    :txs txs})
                tx-body (parse-json tx-resp)
                pull-resp (get-json (str base-url "/sync/" graph-id "/pull?since=0"))
                pull-body (parse-json pull-resp)]
          (testing "tx batch"
            (is (.-ok tx-resp))
            (is (= "tx/batch/ok" (aget tx-body "type"))))
          (testing "pull"
            (is (.-ok pull-resp))
            (is (= "pull/ok" (aget pull-body "type")))
            (is (pos? (count (aget pull-body "txs")))))
          ;; always shut the server down before signalling completion
          (p/then (stop!) (fn [] (done))))))))

(deftest node-adapter-websocket-test
  ;; Websocket flow: connect to /sync/<graph-id>, say hello, then trigger a
  ;; "changed" broadcast by posting a tx batch over HTTP.
  ;; NOTE(review): relies on fixed 50ms/100ms delays for ordering — may be
  ;; flaky on slow CI; consider waiting on specific messages instead.
  (async done
    (p/let [{:keys [base-url stop!]} (start-test-server)
            create-resp (post-json (str base-url "/graphs") {:graph-name "WS Graph"})
            create-body (parse-json create-resp)
            graph-id (aget create-body "graph-id")]
      (testing "websocket hello and changed"
        (let [ws-url (str (string/replace base-url "http" "ws") "/sync/" graph-id)
              ws-module (js/require "ws")
              ;; `ws` renamed its export across versions
              WebSocket (or (.-WebSocket ws-module) ws-module)
              client (new WebSocket ws-url #js {:headers (auth-headers)})
              messages (atom [])
              push-message (fn [data]
                             (let [text (if (string? data) data (.toString data))]
                               (swap! messages conj (js/JSON.parse text))))]
          (.on client "message" push-message)
          (.on client "open"
               (fn []
                 (.send client (protocol/encode-message {:type "hello" :client "test"}))))
          (p/let [_ (p/delay 50)
                  tx-data [{:block/uuid (random-uuid)
                            :block/content "ws"}]
                  txs (protocol/tx->transit tx-data)
                  _ (post-json (str base-url "/sync/" graph-id "/tx/batch")
                               {:t-before 0
                                :txs txs})
                  _ (p/delay 100)]
            (let [types (set (map #(aget % "type") @messages))]
              (is (contains? types "hello"))
              (is (contains? types "changed")))
            (.close client)
            (p/then (stop!) (fn [] (done)))))))))

View File

@@ -0,0 +1,30 @@
(ns logseq.db-sync.node-config-test
  "Unit tests for adapter config normalization and driver validation."
  (:require [cljs.test :refer [deftest is testing]]
            [logseq.db-sync.node.config :as config]))

(defn- throws?
  ;; True when calling `f` throws anything (catches :default, so plain JS
  ;; errors count too).
  [f]
  (try
    (f)
    false
    (catch :default _ true)))

(deftest normalize-config-auth-driver-test
  (testing "static auth driver accepted"
    (let [cfg (config/normalize-config {:auth-driver "static" :auth-token "x"})]
      (is (= "static" (:auth-driver cfg)))))
  (testing "unsupported auth driver throws"
    (is (throws? #(config/normalize-config {:auth-driver "nope"})))))

(deftest normalize-config-storage-driver-test
  (testing "sqlite storage driver accepted"
    (let [cfg (config/normalize-config {:storage-driver "sqlite"})]
      (is (= "sqlite" (:storage-driver cfg)))))
  (testing "unsupported storage driver throws"
    (is (throws? #(config/normalize-config {:storage-driver "other"})))))

(deftest normalize-config-assets-driver-test
  (testing "filesystem assets driver accepted"
    (let [cfg (config/normalize-config {:assets-driver "filesystem"})]
      (is (= "filesystem" (:assets-driver cfg)))))
  (testing "unsupported assets driver throws"
    (is (throws? #(config/normalize-config {:assets-driver "s3"})))))

View File

@@ -0,0 +1,17 @@
(ns logseq.db-sync.platform-test
  "Smoke tests for the platform-neutral Response/Request/URL constructors."
  (:require [cljs.test :refer [deftest is testing]]
            [logseq.db-sync.platform.core :as platform]))

(deftest platform-response-test
  (testing "response builds status and headers"
    (let [resp (platform/response "ok" #js {:status 201
                                            :headers #js {"x-test" "1"}})]
      (is (= 201 (.-status resp)))
      (is (= "1" (.get (.-headers resp) "x-test"))))))

(deftest platform-request-url-test
  (testing "request url parsing"
    (let [req (platform/request "https://example.com/health" #js {:method "GET"})
          url (platform/request-url req)]
      (is (= "/health" (.-pathname url)))
      (is (= "example.com" (.-host url))))))

View File

@@ -1,5 +1,8 @@
(ns logseq.db-sync.test-runner
(:require [cljs.test :as ct]
[logseq.db-sync.node-adapter-test]
[logseq.db-sync.node-config-test]
[logseq.db-sync.platform-test]
[shadow.test :as st]
[shadow.test.env :as env]))

248
deps/db-sync/yarn.lock vendored
View File

@@ -7,11 +7,43 @@ base64-js@^1.3.1:
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
better-sqlite3@^11.10.0:
version "11.10.0"
resolved "https://registry.yarnpkg.com/better-sqlite3/-/better-sqlite3-11.10.0.tgz#2b1b14c5acd75a43fd84d12cc291ea98cef57d98"
integrity sha512-EwhOpyXiOEL/lKzHz9AW1msWFNzGc/z+LzeB3/jnFJpxu+th2yqvzsSWas1v9jgs9+xiXJcD5A8CJxAG2TaghQ==
dependencies:
bindings "^1.5.0"
prebuild-install "^7.1.1"
bindings@^1.5.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df"
integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==
dependencies:
file-uri-to-path "1.0.0"
bl@^4.0.3:
version "4.1.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==
dependencies:
buffer "^5.5.0"
inherits "^2.0.4"
readable-stream "^3.4.0"
buffer-from@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
buffer@^5.5.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==
dependencies:
base64-js "^1.3.1"
ieee754 "^1.1.13"
buffer@^6.0.3:
version "6.0.3"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6"
@@ -20,26 +52,174 @@ buffer@^6.0.3:
base64-js "^1.3.1"
ieee754 "^1.2.1"
ieee754@^1.2.1:
chownr@^1.1.1:
version "1.1.4"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
decompress-response@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc"
integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==
dependencies:
mimic-response "^3.1.0"
deep-extend@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
detect-libc@^2.0.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.1.2.tgz#689c5dcdc1900ef5583a4cb9f6d7b473742074ad"
integrity sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==
end-of-stream@^1.1.0, end-of-stream@^1.4.1:
version "1.4.5"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.5.tgz#7344d711dea40e0b74abc2ed49778743ccedb08c"
integrity sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==
dependencies:
once "^1.4.0"
expand-template@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
file-uri-to-path@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
fs-constants@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
github-from-package@0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce"
integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==
ieee754@^1.1.13, ieee754@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
inherits@^2.0.3, inherits@^2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
ini@~1.3.0:
version "1.3.8"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
isexe@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-3.1.1.tgz#4a407e2bd78ddfb14bea0c27c6f7072dde775f0d"
integrity sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==
mimic-response@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9"
integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==
minimist@^1.2.0, minimist@^1.2.3:
version "1.2.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
version "0.5.3"
resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
napi-build-utils@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-2.0.0.tgz#13c22c0187fcfccce1461844136372a47ddc027e"
integrity sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==
node-abi@^3.3.0:
version "3.87.0"
resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.87.0.tgz#423e28fea5c2f195fddd98acded9938c001ae6dd"
integrity sha512-+CGM1L1CgmtheLcBuleyYOn7NWPVu0s0EJH2C4puxgEZb9h8QpR9G2dBfZJOAUhi7VQxuBPMd0hiISWcTyiYyQ==
dependencies:
semver "^7.3.5"
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
dependencies:
wrappy "1"
prebuild-install@^7.1.1:
version "7.1.3"
resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.3.tgz#d630abad2b147443f20a212917beae68b8092eec"
integrity sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==
dependencies:
detect-libc "^2.0.0"
expand-template "^2.0.3"
github-from-package "0.0.0"
minimist "^1.2.3"
mkdirp-classic "^0.5.3"
napi-build-utils "^2.0.0"
node-abi "^3.3.0"
pump "^3.0.0"
rc "^1.2.7"
simple-get "^4.0.0"
tar-fs "^2.0.0"
tunnel-agent "^0.6.0"
process@^0.11.10:
version "0.11.10"
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==
pump@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.3.tgz#151d979f1a29668dc0025ec589a455b53282268d"
integrity sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
rc@^1.2.7:
version "1.2.8"
resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
dependencies:
deep-extend "^0.6.0"
ini "~1.3.0"
minimist "^1.2.0"
strip-json-comments "~2.0.1"
readable-stream@^3.1.1, readable-stream@^3.4.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
dependencies:
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
readline-sync@^1.4.10:
version "1.4.10"
resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b"
integrity sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==
safe-buffer@^5.0.1, safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
semver@^7.3.5:
version "7.7.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.3.tgz#4b5f4143d007633a8dc671cd0a6ef9147b8bb946"
integrity sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==
shadow-cljs-jar@1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/shadow-cljs-jar/-/shadow-cljs-jar-1.3.4.tgz#0939d91c468b4bc5eab5a958f79e7ef5696fdf62"
@@ -58,6 +238,20 @@ shadow-cljs@^3.3.4:
which "^5.0.0"
ws "^8.18.1"
simple-concat@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f"
integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==
simple-get@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543"
integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==
dependencies:
decompress-response "^6.0.0"
once "^1.3.1"
simple-concat "^1.0.0"
source-map-support@^0.5.21:
version "0.5.21"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
@@ -71,6 +265,51 @@ source-map@^0.6.0:
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"
strip-json-comments@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
tar-fs@^2.0.0:
version "2.1.4"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.4.tgz#800824dbf4ef06ded9afea4acafe71c67c76b930"
integrity sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==
dependencies:
chownr "^1.1.1"
mkdirp-classic "^0.5.2"
pump "^3.0.0"
tar-stream "^2.1.4"
tar-stream@^2.1.4:
version "2.2.0"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
dependencies:
bl "^4.0.3"
end-of-stream "^1.4.1"
fs-constants "^1.0.0"
inherits "^2.0.3"
readable-stream "^3.1.1"
tunnel-agent@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==
dependencies:
safe-buffer "^5.0.1"
util-deprecate@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
which@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/which/-/which-5.0.0.tgz#d93f2d93f79834d4363c7d0c23e00d07c466c8d6"
@@ -78,7 +317,12 @@ which@^5.0.0:
dependencies:
isexe "^3.1.1"
ws@^8.18.1:
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
ws@^8.18.1, ws@^8.18.3:
version "8.19.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.19.0.tgz#ddc2bdfa5b9ad860204f5a72a4863a8895fd8c8b"
integrity sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==

View File

@@ -0,0 +1,116 @@
# DB Sync Node.js Adapter Implementation Plan
Goal: Build a Node.js server adapter for DB sync so users can self-host without Cloudflare dependencies.
Architecture: Reuse the existing db-sync core logic and route handlers while introducing a platform abstraction layer that swaps Cloudflare APIs for Node.js equivalents.
It also provides a new Node.js entrypoint that wires HTTP and WebSocket handling to the shared handlers and to pluggable storage and auth backends.
Tech Stack: ClojureScript, shadow-cljs, Node.js LTS, standard Node HTTP and WebSocket libraries, existing db-sync ClojureScript modules.
Related: Builds on docs/agent-guide/db-sync/db-sync-guide.md, docs/agent-guide/db-sync/protocol.md, and docs/adr/0001-nodejs-db-sync-server-adapter.md.
## Problem statement
The current DB sync server implementation assumes a Cloudflare Worker runtime and related APIs.
That assumption prevents users from running the sync server on standard Node.js infrastructure.
We need a portable adapter that preserves the DB sync protocol and behavior while replacing Cloudflare-specific primitives.
## Testing Plan
I will add an integration test that spins up the Node.js adapter, runs a health check, and performs a minimal pull and tx/batch round-trip over HTTP to confirm protocol compatibility.
I will add an integration test that opens a WebSocket connection, performs hello and pull, and validates changed notifications on tx/batch.
I will add a unit test that exercises the platform abstraction layer to ensure request and response coercion behave the same for both Node and worker inputs.
I will add a unit test that validates storage and auth adapter selection from configuration, including error handling for missing drivers.
NOTE: I will write all tests before I add any implementation behavior.
## Scope
The Node.js adapter will implement the existing HTTP and WebSocket endpoints as described in docs/agent-guide/db-sync/protocol.md.
The adapter will run the same handler logic currently used by the Cloudflare worker.
The adapter will support pluggable storage and auth providers to match the existing worker behavior.
## Non-goals
This plan does not add new DB sync protocol features.
This plan does not migrate existing cloud hosting or change client behavior.
This plan does not redesign storage schemas or transaction ordering.
## Architecture Overview
The adapter will introduce a platform boundary and a Node runtime entrypoint that binds HTTP and WS requests to the existing worker handlers.
The platform boundary will normalize request, response, and WebSocket interfaces so the core logic remains unchanged.
The Node entrypoint will be responsible only for server lifecycle and wiring adapters.
+--------------------+ HTTP/WS +------------------------+ storage/auth +------------------+
| Logseq Client Apps | <--------------> | Node.js DB Sync Adapter | <------------------> | Storage Backends |
+--------------------+ +------------------------+ +------------------+
| Shared db-sync handlers |
+------------------------+
## Implementation Plan
1. Read docs/agent-guide/db-sync/db-sync-guide.md and docs/agent-guide/db-sync/protocol.md and list the Cloudflare-specific APIs used by the worker in deps/db-sync/src/logseq/db_sync/worker/.
2. Create a platform abstraction namespace in deps/db-sync/src/logseq/db_sync/platform/core.cljs that defines the minimal request, response, and WebSocket operations used by handlers.
3. Add a Cloudflare platform adapter in deps/db-sync/src/logseq/db_sync/platform/cloudflare.cljs that wraps the current Worker Request, Response, and WebSocket types.
4. Add a Node platform adapter in deps/db-sync/src/logseq/db_sync/platform/node.cljs that wraps Node HTTP requests, responses, and WebSocket connections.
5. Update deps/db-sync/src/logseq/db_sync/worker/dispatch.cljs and deps/db-sync/src/logseq/db_sync/worker/http.cljs to depend on the platform abstraction instead of direct Worker APIs.
6. Add a Node server entrypoint in deps/db-sync/src/logseq/db_sync/node/entry.cljs that sets up HTTP routes, WS upgrade handling, and lifecycle start/stop.
7. Add a Node routing layer in deps/db-sync/src/logseq/db_sync/node/routes.cljs that maps incoming requests to the existing worker route handlers.
8. Introduce configuration parsing in deps/db-sync/src/logseq/db_sync/node/config.cljs to select storage and auth drivers via environment variables.
9. Add Node adapter implementations for storage and assets in deps/db-sync/src/logseq/db_sync/node/storage.cljs and deps/db-sync/src/logseq/db_sync/node/assets.cljs that map to the existing storage interfaces.
10. Add a new shadow-cljs build target in deps/db-sync/shadow-cljs.edn for the Node adapter output.
11. Add build and run scripts to deps/db-sync/package.json for the Node adapter, including a dev watch command.
12. Update docs/agent-guide/db-sync/db-sync-guide.md with the new local dev and test commands for the Node adapter.
13. Add a new self-hosting section to docs/develop-logseq.md with minimal steps to run the Node adapter.
14. Add integration tests under deps/db-sync/test/logseq/db_sync/node_adapter_test.cljs that launch the Node adapter and exercise HTTP and WS paths.
15. Add unit tests under deps/db-sync/test/logseq/db_sync/platform_test.cljs that cover request normalization and error propagation.
16. Run the tests using the commands listed in the Verification section and confirm they fail before implementation and pass after implementation.
## Configuration Matrix
Use the following environment variables for the Node adapter configuration.
| Variable | Purpose | Example |
| --- | --- | --- |
| DB_SYNC_PORT | HTTP server port | 8080 |
| DB_SYNC_BASE_URL | External base URL for asset links | https://sync.example.com |
| DB_SYNC_STORAGE_DRIVER | Storage backend selection | sqlite |
| DB_SYNC_AUTH_DRIVER | Auth backend selection | static |
| DB_SYNC_ASSETS_DRIVER | Asset storage backend selection | filesystem |
| DB_SYNC_LOG_LEVEL | Log verbosity | info |
## Verification
Run the server-side test suite.
```bash
bb dev:db-sync-test
```
Expected result is a zero exit code and no failing tests.
Run the Node adapter integration tests.
```bash
cd deps/db-sync
npm run test:node-adapter
```
Expected result is a zero exit code and logs that the Node adapter started and all tests passed.
## Edge Cases
Unauthorized requests should return the same 401 or 403 responses as the Cloudflare worker.
Invalid request payloads must produce the same error responses as the worker routes.
WebSocket message validation must reject unknown types with the same error shape.
Snapshot upload should preserve content-encoding and reject missing bodies consistently.
Asset uploads must enforce size limits and return the same error codes as the worker.
## Testing Details
I will add integration tests that launch the Node adapter and use HTTP and WebSocket calls to validate real protocol behavior rather than mocking handlers.
I will add unit tests for the platform adapters to ensure coercion and error handling match existing worker semantics.
These tests validate observable behavior and response shapes instead of internal data structures.
## Implementation Details
- Introduce a platform interface to normalize request, response, and WebSocket operations.
- Keep all protocol validation and handler logic inside deps/db-sync/src/logseq/db_sync/worker to avoid divergence.
- Make the Node entrypoint responsible only for wiring and lifecycle.
- Add configuration parsing and driver selection in Node-specific namespaces.
- Add a new shadow-cljs build target for the Node adapter output.
- Add npm scripts for building and testing the Node adapter.
- Update docs to include self-hosting and local dev guidance.
## Questions
Where should the AGENTS.workflow.md guidance live for plan documents in this repo?
Do we want to support the same storage backends as Cloudflare on day one, or limit to a single local driver for the first release?
Should the Node adapter include optional CORS configuration, or match the current worker defaults exactly?
---

View File

@@ -47,6 +47,8 @@ This guide helps AI agents implement and review db-sync features consistently ac
## Testing & Verification
- Local dev (client+server): `bb dev:db-sync-start` runs the db-sync watcher, `wrangler dev`, and `yarn watch` with `ENABLE_DB_SYNC_LOCAL=true`
- DB-sync server side unit-tests: `bb dev:db-sync-test`
- Node adapter tests: `cd deps/db-sync && npm run test:node-adapter`
- Node adapter build/run: `cd deps/db-sync && npm run build:node-adapter && npm run start:node-adapter`
## Review Checklist
- Protocol versioning and error handling are consistent across client/server.

View File

@@ -122,3 +122,21 @@ yarn release-electron
```
The final released binaries or installers will be at `static/out/`.
## DB sync Node adapter (self-hosted)
Build and run the Node.js adapter for self-hosted DB sync.
```bash
cd deps/db-sync
yarn install
DB_SYNC_AUTH_DRIVER=static DB_SYNC_AUTH_TOKEN=dev-token DB_SYNC_PORT=8080 yarn build:node-adapter
DB_SYNC_AUTH_DRIVER=static DB_SYNC_AUTH_TOKEN=dev-token DB_SYNC_PORT=8080 yarn start:node-adapter
```
Optional environment variables:
- DB_SYNC_DATA_DIR (defaults to data/db-sync)
- DB_SYNC_STATIC_USER_ID (defaults to user)
- DB_SYNC_STATIC_EMAIL
- DB_SYNC_STATIC_USERNAME

View File

@@ -17,6 +17,7 @@
"@tailwindcss/typography": "0.5.7",
"@types/gulp": "^4.0.7",
"autoprefixer": "^10.4.13",
"better-sqlite3": "^12.6.2",
"cross-env": "^7.0.3",
"cssnano": "^5.1.13",
"del": "^6.0.0",
@@ -47,7 +48,8 @@
"tailwindcss-animate": "^1.0.7",
"typescript": "^4.4.3",
"webpack": "^5.98.0",
"webpack-cli": "^6.0.1"
"webpack-cli": "^6.0.1",
"ws": "^8.19.0"
},
"scripts": {
"watch": "run-p gulp:watch cljs:watch webpack-app-watch",

View File

@@ -2211,6 +2211,14 @@ base@^0.11.1:
mixin-deep "^1.2.0"
pascalcase "^0.1.1"
better-sqlite3@^12.6.2:
version "12.6.2"
resolved "https://registry.yarnpkg.com/better-sqlite3/-/better-sqlite3-12.6.2.tgz#770649f28a62e543a360f3dfa1afe4cc944b1937"
integrity sha512-8VYKM3MjCa9WcaSAI3hzwhmyHVlH8tiGFwf0RlTsZPWJ1I5MkzjiudCo4KC4DxOaL/53A5B1sI/IbldNFDbsKA==
dependencies:
bindings "^1.5.0"
prebuild-install "^7.1.1"
big-integer@1.6.x:
version "1.6.52"
resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85"
@@ -11046,6 +11054,11 @@ ws@^7.4.6:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
ws@^8.19.0:
version "8.19.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.19.0.tgz#ddc2bdfa5b9ad860204f5a72a4863a8895fd8c8b"
integrity sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==
ws@~8.17.1:
version "8.17.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"