remove legacy rtc implementation and tests

This commit is contained in:
Tienson Qin
2026-01-29 18:14:09 +08:00
parent 16221de6f3
commit 8472d03826
60 changed files with 1285 additions and 7489 deletions

View File

@@ -255,49 +255,6 @@
(<= 2 (count maybe-encrypted-package)))
(<decrypt-text aes-key maybe-encrypted-package)))
(defn <encrypt-map
  "Encrypt the values of the attrs listed in `encrypt-attr-set` on map `m`
  with `aes-key`; each encrypted value is transit-serialized back into the
  map. Attrs absent from `m` are skipped. Returns a promise of the updated
  map."
  [aes-key encrypt-attr-set m]
  (assert (map? m))
  (reduce
   ;; thread a promise of the map through the reduction, encrypting one
   ;; attr per step
   (fn [map-p encrypt-attr]
     (p/let [m map-p]
       (if-let [v (get m encrypt-attr)]
         (p/let [v' (p/chain (<encrypt-text aes-key v) ldb/write-transit-str)]
           (assoc m encrypt-attr v'))
         m)))
   (p/promise m) encrypt-attr-set))
(defn <encrypt-av-coll
  "Encrypt an attr-value collection; see also `rtc-schema/av-schema`.
  Each entry is `[a v & others]`: `v` is encrypted (then transit-serialized)
  only when `a` is in `encrypt-attr-set` AND `v` is a string; everything
  else passes through unchanged. Returns a promise of the rewritten coll."
  [aes-key encrypt-attr-set av-coll]
  (p/all
   (mapv
    (fn [[a v & others]]
      (p/let [v' (if (and (contains? encrypt-attr-set a)
                          (string? v))
                   (p/chain (<encrypt-text aes-key v) ldb/write-transit-str)
                   v)]
        ;; keep any trailing entries of the av tuple untouched
        (apply conj [a v'] others)))
    av-coll)))
(defn <decrypt-map
  "Decrypt the attrs listed in `encrypt-attr-set` on map `m` with `aes-key`,
  returning a promise of the updated map.
  Only string values are considered (they hold transit-encoded encrypted
  packages); when `<decrypt-text-if-encrypted` yields nil the original value
  is kept. A decryption failure rejects the returned promise with an ex-info
  carrying the map and the failing attr. (Previously the catch handler
  RETURNED the ex-info, so the exception object silently replaced the
  accumulated map and the error was never surfaced.)"
  [aes-key encrypt-attr-set m]
  (assert (map? m))
  (reduce
   (fn [map-p encrypt-attr]
     (p/let [m map-p]
       (if-let [v (get m encrypt-attr)]
         (if (string? v)
           (->
            (p/let [v' (<decrypt-text-if-encrypted aes-key (ldb/read-transit-str v))]
              (if v'
                (assoc m encrypt-attr v')
                m))
            ;; rethrow so callers see the failure instead of receiving an
            ;; ExceptionInfo as the map value
            (p/catch (fn [e] (throw (ex-info "decrypt map" {:m m :decrypt-attr encrypt-attr} e)))))
           m)
         m)))
   (p/promise m) encrypt-attr-set))
(defn <encrypt-text-by-text-password
[text-password text]
(assert (and (string? text-password) (string? text)))

View File

@@ -27,49 +27,48 @@
[& flows]
(m/ap (m/?> (m/?> (count flows) (m/seed flows)))))
;; Flow that never emits and never terminates (parks on m/never).
(def never-flow (m/ap (m/? m/never)))
;; Exponential retry delays in ms: 1000, 2000, 4000, ... (lazy, infinite).
(def delays (reductions * 1000 (repeat 2)))
;; Unique sentinel marking "retry" results, distinguishable from any task value.
(def ^:private retry-sentinel (js-obj))
(defn backoff
"Retry task when it throw exception `(get ex-data :missionary/retry)`
(comment
(def never-flow (m/ap (m/? m/never)))
(def delays (reductions * 1000 (repeat 2)))
(def ^:private retry-sentinel (js-obj))
(defn backoff
"Retry task when it throw exception `(get ex-data :missionary/retry)`
:delay-seq - retry delay-msecs
:reset-flow - retry immediately when getting value from flow and reset delays to init state"
[{:keys [delay-seq reset-flow]
:or {delay-seq (take 4 delays)
reset-flow never-flow}}
task]
(let [reset-flow* (mix reset-flow never-flow)]
(m/sp
(loop [[delay & rest-delays] (seq delay-seq)]
(let [r (try
(m/? task)
(catch :default e
(if (and (some-> e ex-data :missionary/retry)
(pos-int? delay))
(let [delay-or-reset
(m/? (m/race (m/sleep delay :delay)
(m/reduce (fn [_ r] (when r (reduced :reset))) nil
(->> (continue-flow reset-flow*)
(m/eduction (drop 1) (take 1))))))
rest-delays*
(case delay-or-reset
:delay
(do (println :missionary/retry "after" delay "ms (" (ex-message e) ")")
rest-delays)
:reset
(do (println :missionary/retry "retry now (" (ex-message e) ")")
delay-seq))]
[retry-sentinel rest-delays*])
(throw e))))]
(if (and (vector? r)
(first r) ;; if delete this `(first r)`,
[{:keys [delay-seq reset-flow]
:or {delay-seq (take 4 delays)
reset-flow never-flow}}
task]
(let [reset-flow* (mix reset-flow never-flow)]
(m/sp
(loop [[delay & rest-delays] (seq delay-seq)]
(let [r (try
(m/? task)
(catch :default e
(if (and (some-> e ex-data :missionary/retry)
(pos-int? delay))
(let [delay-or-reset
(m/? (m/race (m/sleep delay :delay)
(m/reduce (fn [_ r] (when r (reduced :reset))) nil
(->> (continue-flow reset-flow*)
(m/eduction (drop 1) (take 1))))))
rest-delays*
(case delay-or-reset
:delay
(do (println :missionary/retry "after" delay "ms (" (ex-message e) ")")
rest-delays)
:reset
(do (println :missionary/retry "retry now (" (ex-message e) ")")
delay-seq))]
[retry-sentinel rest-delays*])
(throw e))))]
(if (and (vector? r)
(first r) ;; if delete this `(first r)`,
;; the code continues to the next line even if r=0...
;; I suspect it's a bug in missionary.
(identical? retry-sentinel (first r)))
(recur (second r))
r))))))
(identical? retry-sentinel (first r)))
(recur (second r))
r)))))))
(defn clock
"Return a flow that emits `value` every `interval-ms`."
@@ -84,16 +83,17 @@
(recur))))
(continue-flow value))))
(defn concurrent-exec-flow
"Return a flow.
(comment
(defn concurrent-exec-flow
"Return a flow.
Concurrent exec `f` on `flow` with max concurrent count `par`.
- `(f v)` return a task.
- `v` is value from `flow`"
[par flow f]
(assert (pos-int? par))
(m/ap
(let [v (m/?> par flow)]
(m/? (f v)))))
[par flow f]
(assert (pos-int? par))
(m/ap
(let [v (m/?> par flow)]
(m/? (f v))))))
(defn debounce
[duration-ms flow]

View File

@@ -51,15 +51,6 @@
;; Enable for local development
;; (def PUBLISH-API-BASE "http://localhost:8787")
;; Compile-time switch between the production and dev RTC websocket
;; endpoints defined just below.
(goog-define ENABLE-RTC-SYNC-PRODUCTION false)
(if ENABLE-RTC-SYNC-PRODUCTION
  (def RTC-WS-URL "wss://ws.logseq.com/rtc-sync?token=%s")
  (def RTC-WS-URL "wss://ws-dev.logseq.com/rtc-sync?token=%s"))
;; (goog-define ENABLE-DB-SYNC false)
;; Compile-time toggle for the db-sync implementation.
(goog-define ENABLE-DB-SYNC true)
(defonce db-sync-enabled? ENABLE-DB-SYNC)
;; NOTE(review): presumably points db-sync at a locally running server —
;; inferred from the name, confirm against usage.
(goog-define ENABLE-DB-SYNC-LOCAL false)
(defonce db-sync-local? ENABLE-DB-SYNC-LOCAL)

View File

@@ -1,397 +0,0 @@
(ns frontend.handler.db-based.db-sync
"DB-sync handler based on Cloudflare Durable Objects."
(:require [clojure.string :as string]
[frontend.config :as config]
[frontend.db :as db]
[frontend.handler.notification :as notification]
[frontend.handler.repo :as repo-handler]
[frontend.handler.user :as user-handler]
[frontend.state :as state]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db-sync.malli-schema :as db-sync-schema]
[logseq.db.sqlite.util :as sqlite-util]
[promesa.core :as p]))
(defn- ws->http-base
  "Derive the HTTP(S) endpoint from a websocket URL: swaps the wss/ws
  scheme for https/http and strips a trailing \"/sync/%s\" path template.
  Returns nil when `ws-url` is not a string."
  [ws-url]
  (when (string? ws-url)
    (-> (condp (fn [prefix s] (string/starts-with? s prefix)) ws-url
          "wss://" (str "https://" (subs ws-url (count "wss://")))
          "ws://" (str "http://" (subs ws-url (count "ws://")))
          ws-url)
        (string/replace #"/sync/%s$" ""))))
(defn http-base
  "Resolve the db-sync HTTP base URL, preferring the explicitly configured
  HTTP base over one derived from the websocket URL."
  []
  (if config/db-sync-http-base
    config/db-sync-http-base
    (ws->http-base config/db-sync-ws-url)))
(def ^:private snapshot-text-decoder (js/TextDecoder.))
(defn- ->uint8
  "Normalize `data` into a js/Uint8Array: Uint8Arrays pass through,
  ArrayBuffers are wrapped, strings are UTF-8 encoded, and anything else
  is handed to the Uint8Array constructor."
  [data]
  (condp instance? data
    js/Uint8Array data
    js/ArrayBuffer (js/Uint8Array. data)
    (if (string? data)
      (.encode (js/TextEncoder.) data)
      (js/Uint8Array. data))))
(defn- decode-snapshot-rows
  "Decode one snapshot frame `payload` (bytes or string) into rows:
  UTF-8 decode the bytes, then read the text as transit."
  [payload]
  (sqlite-util/read-transit-str (.decode snapshot-text-decoder (->uint8 payload))))
(defn- frame-len
  "Read a frame's length prefix: a big-endian uint32 at `offset` within the
  js/Uint8Array `data`.
  Adds `data`'s own byteOffset when constructing the DataView so that a
  Uint8Array that is a view into a larger ArrayBuffer is read correctly;
  the original ignored byteOffset (harmless for freshly allocated arrays,
  wrong for non-zero-offset views)."
  [^js data offset]
  (let [view (js/DataView. (.-buffer data) (+ (.-byteOffset data) offset) 4)]
    (.getUint32 view 0 false)))
(defn- concat-bytes
  "Concatenate two js/Uint8Array values into a fresh array. When either
  side is nil the other is returned as-is (no copy)."
  [^js a ^js b]
  (if (nil? a)
    b
    (if (nil? b)
      a
      (let [len-a (.-byteLength a)
            out (js/Uint8Array. (+ len-a (.-byteLength b)))]
        (doto out
          (.set a 0)
          (.set b len-a))))))
(defn- parse-framed-chunk
  "Parse length-prefixed frames out of `buffer` (unconsumed bytes from a
  previous call, may be nil) concatenated with the newly received `chunk`.
  Each frame is a 4-byte big-endian length followed by a transit payload
  (decoded via `decode-snapshot-rows`).
  Returns {:rows <rows decoded so far> :buffer <unconsumed tail or nil>}."
  [buffer chunk]
  (let [data (concat-bytes buffer chunk)
        total (.-byteLength data)]
    (loop [offset 0
           rows []]
      ;; fewer than 4 bytes left: not even a complete length prefix
      (if (< (- total offset) 4)
        {:rows rows
         :buffer (when (< offset total)
                   (.slice data offset total))}
        (let [len (frame-len data offset)
              next-offset (+ offset 4 len)]
          (if (<= next-offset total)
            (let [payload (.slice data (+ offset 4) next-offset)
                  decoded (decode-snapshot-rows payload)]
              (recur next-offset (into rows decoded)))
            ;; frame incomplete — keep everything from offset as the tail
            {:rows rows
             :buffer (.slice data offset total)}))))))
(defn- finalize-framed-buffer
  "Drain the remaining `buffer` at end-of-stream. Returns the decoded rows,
  or [] when the buffer is nil/empty. Throws when leftover bytes do not
  form complete frames (i.e. the download was truncated)."
  [buffer]
  (if (or (nil? buffer) (zero? (.-byteLength buffer)))
    []
    (let [{:keys [rows buffer]} (parse-framed-chunk nil buffer)]
      (if (and (seq rows) (or (nil? buffer) (zero? (.-byteLength buffer))))
        rows
        (throw (ex-info "incomplete framed buffer" {:buffer buffer :rows rows}))))))
(defn- auth-headers
  "Build the bearer authorization header map from the current auth id
  token, or nil when no token is available."
  []
  (let [token (state/get-auth-id-token)]
    (when token
      {"authorization" (str "Bearer " token)})))
(defn- with-auth-headers
  "Merge the auth headers into fetch `opts` under :headers (auth entries
  overwrite same-named keys). Returns `opts` unchanged when no token is
  available."
  [opts]
  (let [auth (auth-headers)]
    (if (nil? auth)
      opts
      (update opts :headers #(merge (or % {}) auth)))))
(declare fetch-json)
(declare coerce-http-response)
(defn fetch-json
  "Fetch `url` with `opts` (auth headers added automatically) and parse the
  JSON response body.
  `response-schema` / `error-schema` name malli coercers applied via
  `coerce-http-response`; `error-schema` defaults to :error.
  Returns a promise of the (coerced) body map. Rejects with an ex-info
  carrying :status/:url/:body when the response is not ok, or when a
  response-schema is given and coercion yields nil."
  [url opts {:keys [response-schema error-schema] :or {error-schema :error}}]
  (p/let [resp (js/fetch url (clj->js (with-auth-headers opts)))
          text (.text resp)
          data (when (seq text) (js/JSON.parse text))]
    (if (.-ok resp)
      (let [body (js->clj data :keywordize-keys true)
            body (if response-schema
                   (coerce-http-response response-schema body)
                   body)]
        ;; a nil body after coercion means the response failed the schema
        (if (or (nil? response-schema) body)
          body
          (throw (ex-info "db-sync invalid response"
                          {:status (.-status resp)
                           :url url
                           :body body}))))
      (let [body (when data (js->clj data :keywordize-keys true))
            body (if error-schema
                   (coerce-http-response error-schema body)
                   body)]
        (throw (ex-info "db-sync request failed"
                        {:status (.-status resp)
                         :url url
                         :body body}))))))
(def ^:private invalid-coerce ::invalid-coerce)
(defn- coerce
  "Apply malli `coercer` to `value`. On failure, log `context` plus the
  error and return the `invalid-coerce` sentinel instead of throwing."
  [coercer value context]
  (try
    (coercer value)
    (catch :default e
      (log/error :db-sync/malli-coerce-failed (merge context {:error e :value value}))
      invalid-coerce)))
(defn- coerce-http-request
  "Coerce an outgoing request `body` with the coercer registered under
  `schema-key`. Returns the coerced body, nil when coercion fails, or the
  body untouched when no coercer is registered."
  [schema-key body]
  (let [coercer (get db-sync-schema/http-request-coercers schema-key)]
    (if (nil? coercer)
      body
      (let [coerced (coerce coercer body {:schema schema-key :dir :request})]
        (if (= coerced invalid-coerce)
          nil
          coerced)))))
(defn- coerce-http-response
  "Coerce an incoming response `body` with the coercer registered under
  `schema-key`. Returns the coerced body, nil when coercion fails, or the
  body untouched when no coercer is registered."
  [schema-key body]
  (if-let [coercer (get db-sync-schema/http-response-coercers schema-key)]
    (let [coerced (coerce coercer body {:schema schema-key :dir :response})]
      (when-not (= coerced invalid-coerce)
        coerced))
    body))
(defn <rtc-start!
  "Start db-sync for `repo` in the db worker. `_stop-before-start?` is
  accepted only for interface parity with the legacy RTC handler and is
  ignored here."
  [repo & {:keys [_stop-before-start?] :as _opts}]
  (log/info :db-sync/start {:repo repo})
  (state/<invoke-db-worker :thread-api/db-sync-start repo))
(defn <rtc-stop!
  "Stop db-sync in the db worker."
  []
  (log/info :db-sync/stop true)
  (state/<invoke-db-worker :thread-api/db-sync-stop))
(defn <rtc-update-presence!
  "Forward the uuid of the block currently being edited to the db worker's
  presence endpoint."
  [editing-block-uuid]
  (state/<invoke-db-worker :thread-api/db-sync-update-presence editing-block-uuid))
(defn <rtc-get-users-info
  "Fetch the member list of the current graph from the HTTP API and store
  it in app state under :rtc/users-info keyed by repo. No-op when the
  graph has no rtc uuid; resolves nil when no HTTP base is configured."
  []
  (when-let [graph-uuid (ldb/get-graph-rtc-uuid (db/get-db))]
    (let [base (http-base)
          repo (state/get-current-repo)]
      (if base
        (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
                resp (fetch-json (str base "/graphs/" graph-uuid "/members")
                                 {:method "GET"}
                                 {:response-schema :graph-members/list})
                members (:members resp)
                users (mapv (fn [{:keys [user-id role email username]}]
                              ;; best available display name: username > email > id
                              (let [name (or username email user-id)
                                    user-type (some-> role keyword)]
                                (cond-> {:user/uuid user-id
                                         :user/name name
                                         :graph<->user/user-type user-type}
                                  (string? email) (assoc :user/email email))))
                            members)]
          (state/set-state! :rtc/users-info {repo users}))
        (p/resolved nil)))))
(defn <rtc-create-graph!
  "Create a remote graph for `repo` via the db-sync HTTP API and record the
  returned graph id (plus db-type and e2ee flag) as kv entries in the local
  db. Resolves to the graph id. Rejects when the HTTP base is missing, the
  request body fails schema coercion, or the response carries no graph id."
  [repo]
  (let [schema-version (some-> (ldb/get-graph-schema-version (db/get-db)) :major str)
        base (http-base)]
    (if base
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
              ;; graph name on the server excludes the local db-version prefix
              body (coerce-http-request :graphs/create
                                        {:graph-name (string/replace repo config/db-version-prefix "")
                                         :schema-version schema-version})
              result (if (nil? body)
                       (p/rejected (ex-info "db-sync invalid create-graph body"
                                            {:repo repo}))
                       (fetch-json (str base "/graphs")
                                   {:method "POST"
                                    :headers {"content-type" "application/json"}
                                    :body (js/JSON.stringify (clj->js body))}
                                   {:response-schema :graphs/create}))
              graph-id (:graph-id result)]
        (if graph-id
          (p/do!
           (ldb/transact! repo [(sqlite-util/kv :logseq.kv/db-type "db")
                                (sqlite-util/kv :logseq.kv/graph-uuid (uuid graph-id))
                                (sqlite-util/kv :logseq.kv/graph-rtc-e2ee? true)])
           graph-id)
          (p/rejected (ex-info "db-sync missing graph id in create response"
                               {:type :db-sync/invalid-graph
                                :response result}))))
      (p/rejected (ex-info "db-sync missing graph info"
                           {:type :db-sync/invalid-graph
                            :base base})))))
(defn <rtc-delete-graph!
  "Delete the remote graph `graph-uuid` via the HTTP API.
  `_schema-version` is kept for interface parity with the legacy handler
  and is unused. Rejects when the graph uuid or HTTP base is missing."
  [graph-uuid _schema-version]
  (let [base (http-base)]
    (if (and graph-uuid base)
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)]
        (fetch-json (str base "/graphs/" graph-uuid)
                    {:method "DELETE"}
                    {:response-schema :graphs/delete}))
      (p/rejected (ex-info "db-sync missing graph id"
                           {:type :db-sync/invalid-graph
                            :graph-uuid graph-uuid
                            :base base})))))
(defn <rtc-download-graph!
  "Download the snapshot of remote graph `graph-uuid` and import it locally
  as `graph-name`.
  Steps: pull the current remote tx `t`; request a presigned snapshot
  download url; stream the snapshot body as length-prefixed transit frames
  (see `parse-framed-chunk`); hand all decoded rows to the db worker for
  import; finally DELETE the presigned url to clean up.
  Publishes :rtc/log download events and tracks the in-flight graph via
  :rtc/downloading-graph-uuid. Resolves to the number of imported rows.
  `_graph-schema-version` is unused (interface parity with the legacy
  handler)."
  [graph-name graph-uuid _graph-schema-version]
  (state/set-state! :rtc/downloading-graph-uuid graph-uuid)
  (let [base (http-base)]
    (-> (if (and graph-uuid base)
          (let [download-url* (atom nil)]
            (-> (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
                        graph (str config/db-version-prefix graph-name)
                        pull-resp (fetch-json (str base "/sync/" graph-uuid "/pull")
                                              {:method "GET"}
                                              {:response-schema :sync/pull})
                        remote-tx (:t pull-resp)
                        _ (when-not (integer? remote-tx)
                            (throw (ex-info "non-integer remote-tx when downloading graph"
                                            {:graph graph-name
                                             :remote-tx remote-tx})))
                        download-resp (fetch-json (str base "/sync/" graph-uuid "/snapshot/download")
                                                  {:method "GET"}
                                                  {:response-schema :sync/snapshot-download})
                        download-url (:url download-resp)
                        ;; remember the url so the finally-block can DELETE it
                        _ (reset! download-url* download-url)
                        _ (when-not (string? download-url)
                            (throw (ex-info "missing snapshot download url"
                                            {:graph graph-name
                                             :response download-resp})))
                        resp (js/fetch download-url (clj->js (with-auth-headers {:method "GET"})))
                        ;; content-length may be absent or non-numeric
                        total-bytes (when-let [raw (some-> resp .-headers (.get "content-length"))]
                                      (let [parsed (js/parseInt raw 10)]
                                        (when-not (js/isNaN parsed) parsed)))
                        _ (state/pub-event!
                           [:rtc/log {:type :rtc.log/download
                                      :sub-type :download-progress
                                      :graph-uuid graph-uuid
                                      :message (str "Start downloading graph snapshot, file size: " total-bytes)}])]
                  (when-not (.-ok resp)
                    (throw (ex-info "snapshot download failed"
                                    {:graph graph-name
                                     :status (.-status resp)})))
                  (when-not (.-body resp)
                    (throw (ex-info "snapshot download missing body"
                                    {:graph graph-name})))
                  ;; stream the body chunk by chunk, decoding complete frames
                  ;; as they arrive and carrying partial bytes in `buffer`
                  (p/let [reader (.getReader (.-body resp))]
                    (p/loop [buffer nil
                             total 0
                             total-rows []
                             loaded 0]
                      (p/let [chunk (.read reader)]
                        (if (.-done chunk)
                          (let [rows (finalize-framed-buffer buffer)
                                total' (+ total (count rows))
                                total-rows' (into total-rows rows)]
                            (state/pub-event!
                             [:rtc/log {:type :rtc.log/download
                                        :sub-type :download-completed
                                        :graph-uuid graph-uuid
                                        :message "Graph snapshot downloaded"}])
                            (when (seq total-rows')
                              (state/<invoke-db-worker :thread-api/db-sync-import-kvs-rows
                                                       graph total-rows' true graph-uuid remote-tx))
                            total')
                          (let [value (.-value chunk)
                                loaded' (+ loaded (.-byteLength value))
                                {:keys [rows buffer]} (parse-framed-chunk buffer value)
                                total' (+ total (count rows))]
                            (p/recur buffer total' (into total-rows rows) loaded')))))))
                (p/finally
                  (fn []
                    ;; best-effort cleanup of the presigned snapshot object
                    (when-let [download-url @download-url*]
                      (js/fetch download-url (clj->js (with-auth-headers {:method "DELETE"}))))))))
          (p/rejected (ex-info "db-sync missing graph info"
                               {:type :db-sync/invalid-graph
                                :graph-uuid graph-uuid
                                :base base})))
        ;; NOTE(review): this catch just rethrows — it is a no-op and could
        ;; be removed
        (p/catch (fn [error]
                   (throw error)))
        (p/finally
          (fn []
            (state/set-state! :rtc/downloading-graph-uuid nil))))))
(defn <get-remote-graphs
  "Fetch the user's remote graphs from the HTTP API, reshape them into the
  legacy RTC graph-map format (:url/:GraphName/:GraphUUID/...), store them
  under :rtc/graphs, and refresh the repo list. Resolves [] when no HTTP
  base is configured; always clears :rtc/loading-graphs? when done."
  []
  (let [base (http-base)]
    (if-not base
      (p/resolved [])
      (-> (p/let [_ (state/set-state! :rtc/loading-graphs? true)
                  _ (js/Promise. user-handler/task--ensure-id&access-token)
                  resp (fetch-json (str base "/graphs")
                                   {:method "GET"}
                                   {:response-schema :graphs/list})
                  graphs (:graphs resp)
                  result (mapv (fn [graph]
                                 (merge
                                  {:url (str config/db-version-prefix (:graph-name graph))
                                   :GraphName (:graph-name graph)
                                   :GraphSchemaVersion (:schema-version graph)
                                   :GraphUUID (:graph-id graph)
                                   :rtc-graph? true
                                   :graph<->user-user-type (:role graph)
                                   :graph<->user-grant-by-user (:invited-by graph)}
                                  ;; pass through any extra server fields
                                  (dissoc graph :graph-id :graph-name :schema-version :role :invited-by)))
                               graphs)]
            (state/set-state! :rtc/graphs result)
            (repo-handler/refresh-repos!)
            result)
          (p/finally
            (fn []
              (state/set-state! :rtc/loading-graphs? false)))))))
(defn <rtc-invite-email
  "Invite `email` to graph `graph-uuid` as a member via the HTTP API.
  For e2ee graphs, additionally asks the db worker to grant the invitee
  graph access. Shows a success/error notification; request failures are
  logged and swallowed (resolve nil). Rejects only when the base url,
  graph uuid, or email is missing/invalid up front."
  [graph-uuid email]
  (let [base (http-base)
        graph-uuid (str graph-uuid)]
    (if (and base (string? graph-uuid) (string? email))
      (->
       (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
               body (coerce-http-request :graph-members/create
                                         {:email email
                                          :role "member"})
               _ (when (nil? body)
                   (throw (ex-info "db-sync invalid invite body"
                                   {:graph-uuid graph-uuid
                                    :email email})))
               _ (fetch-json (str base "/graphs/" graph-uuid "/members")
                             {:method "POST"
                              :headers {"content-type" "application/json"}
                              :body (js/JSON.stringify (clj->js body))}
                             {:response-schema :graph-members/create})
               repo (state/get-current-repo)
               e2ee? (ldb/get-graph-rtc-e2ee? (db/get-db))
               ;; e2ee graphs also need a key grant for the new member
               _ (when (and repo e2ee?)
                   (state/<invoke-db-worker :thread-api/db-sync-grant-graph-access
                                            repo graph-uuid email))]
         (notification/show! "Invitation sent!" :success))
       (p/catch (fn [e]
                  (notification/show! "Something wrong, please try again." :error)
                  (log/error :db-sync/invite-email-failed
                             {:error e
                              :graph-uuid graph-uuid
                              :email email}))))
      (p/rejected (ex-info "db-sync missing invite info"
                           {:type :db-sync/invalid-invite
                            :graph-uuid graph-uuid
                            :email email
                            :base base})))))
(defn <rtc-remove-member!
  "Remove member `member-id` from graph `graph-uuid` via the HTTP API.
  Both ids are stringified first. Rejects when the base url or either id
  is missing."
  [graph-uuid member-id]
  (let [base (http-base)
        graph-uuid (some-> graph-uuid str)
        member-id (some-> member-id str)]
    (if (and base (string? graph-uuid) (string? member-id))
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)]
        (fetch-json (str base "/graphs/" graph-uuid "/members/" member-id)
                    {:method "DELETE"}
                    {:response-schema :graph-members/delete}))
      (p/rejected (ex-info "db-sync missing member info"
                           {:type :db-sync/invalid-member
                            :graph-uuid graph-uuid
                            :member-id member-id
                            :base base})))))
(defn <rtc-leave-graph!
  "Remove the current user from graph `graph-uuid`. Rejects when the
  user's uuid cannot be resolved."
  [graph-uuid]
  (let [member-id (user-handler/user-uuid)]
    (if member-id
      (<rtc-remove-member! graph-uuid member-id)
      (p/rejected (ex-info "db-sync missing user id"
                           {:type :db-sync/invalid-member
                            :graph-uuid graph-uuid})))))

View File

@@ -1,185 +0,0 @@
(ns frontend.handler.db-based.rtc
"RTC handler"
(:require [clojure.pprint :as pp]
[frontend.config :as config]
[frontend.db :as db]
[frontend.handler.db-based.rtc-flows :as rtc-flows]
[frontend.handler.notification :as notification]
[frontend.handler.repo :as repo-handler]
[frontend.handler.user :as user-handler]
[frontend.state :as state]
[frontend.util :as util]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db.common.sqlite :as common-sqlite]
[logseq.shui.ui :as shui]
[promesa.core :as p]))
(defn <rtc-create-graph!
  "Legacy RTC: ensure auth, then ask the db worker to asynchronously
  upload `repo` to the server under its sanitized name."
  [repo]
  (p/do!
   (js/Promise. user-handler/task--ensure-id&access-token)
   (let [token (state/get-auth-id-token)
         repo-name (common-sqlite/sanitize-db-name repo)]
     (state/<invoke-db-worker :thread-api/rtc-async-upload-graph repo token repo-name))))
(defn <rtc-delete-graph!
  "Legacy RTC: ensure auth, then delete remote graph `graph-uuid` at
  `schema-version` through the db worker."
  [graph-uuid schema-version]
  (p/do!
   (js/Promise. user-handler/task--ensure-id&access-token)
   (let [token (state/get-auth-id-token)]
     (state/<invoke-db-worker :thread-api/rtc-delete-graph token graph-uuid schema-version))))
(defn <rtc-download-graph!
  "Legacy RTC: download remote graph `graph-uuid` as `graph-name`.
  Requests a download, waits (up to `timeout-ms`) for the server to
  prepare an S3 url, then has the db worker fetch and import it. Tracks
  the in-flight graph via :rtc/downloading-graph-uuid and always clears
  it on completion. On :timeout the download is silently skipped."
  [graph-name graph-uuid graph-schema-version timeout-ms]
  (assert (some? graph-schema-version))
  (state/set-state! :rtc/downloading-graph-uuid graph-uuid)
  (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
          token (state/get-auth-id-token)
          download-info-uuid (state/<invoke-db-worker
                              :thread-api/rtc-request-download-graph token graph-uuid graph-schema-version)
          {:keys [_download-info-uuid
                  download-info-s3-url
                  _download-info-tx-instant
                  _download-info-t
                  _download-info-created-at]
           :as result}
          (state/<invoke-db-worker :thread-api/rtc-wait-download-graph-info-ready
                                   token download-info-uuid graph-uuid graph-schema-version timeout-ms)]
    (->
     (when (not= result :timeout)
       (assert (some? download-info-s3-url) result)
       (p/let [r (state/<invoke-db-worker :thread-api/rtc-download-graph-from-s3
                                          graph-uuid graph-name download-info-s3-url)]
         ;; worker reports failures by returning the exception value
         (when (instance? ExceptionInfo r)
           (log/error :rtc-download-graph-from-s3 r))))
     (p/finally
       #(state/set-state! :rtc/downloading-graph-uuid nil)))))
(defn <rtc-stop!
  "Legacy RTC: stop the rtc loop in the db worker."
  []
  (state/<invoke-db-worker :thread-api/rtc-stop))
(defn <rtc-update-presence!
  "Legacy RTC: presence updates are not supported — no-op that resolves
  nil (kept for interface parity with the db-sync handler)."
  [_editing-block-uuid]
  (p/resolved nil))
(defn <rtc-branch-graph!
  "Legacy RTC: ensure auth, then ask the db worker to asynchronously
  branch `repo` on the server (used to upload a higher-schema local
  graph as a new branch)."
  [repo]
  (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
          token (state/get-auth-id-token)]
    (state/<invoke-db-worker :thread-api/rtc-async-branch-graph repo token)))
(defn notification-download-higher-schema-graph!
  "Show a sticky warning that the server holds a higher schema-version
  graph, with a button that downloads it under the suffixed name
  `<graph-name>-<schema-version>` (60s timeout)."
  [graph-name graph-uuid schema-version]
  (let [graph-name* (str graph-name "-" schema-version)]
    (notification/show!
     [:div "There's a higher schema-version graph on the server."
      (shui/button
       {:on-click
        (fn [e]
          (util/stop e)
          (<rtc-download-graph! graph-name* graph-uuid schema-version 60000))}
       "Download")]
     :warning false)))
(declare <rtc-start!)
(defn- notification-upload-higher-schema-graph!
  "Show a sticky warning that the local graph's schema version exceeds
  the server's, with a button that branches the graph on the server and
  then re-triggers rtc start."
  [repo]
  (notification/show!
   [:div.flex.flex-col.gap-2
    [:div "The local graph has a higher schema version than the graph on the server."]
    [:div
     (shui/button
      {:size :sm
       :on-click
       (fn [e]
         (util/stop e)
         (p/do! (<rtc-branch-graph! repo)
                (rtc-flows/trigger-rtc-start repo)))}
      "Upload to server")]]
   :warning false))
(defn <rtc-get-users-info
  "Legacy RTC: fetch the current graph's user info via the db worker and
  store it under :rtc/users-info keyed by repo. No-op when the graph has
  no rtc uuid."
  []
  (when-let [graph-uuid (ldb/get-graph-rtc-uuid (db/get-db))]
    (p/let [token (state/get-auth-id-token)
            repo (state/get-current-repo)
            result (state/<invoke-db-worker :thread-api/rtc-get-users-info token graph-uuid)]
      (state/set-state! :rtc/users-info {repo result}))))
(defn <rtc-start!
  "Legacy RTC: start rtc for `repo` in the db worker (skipped with a log
  line when the repo has no graph uuid).
  Known start failures are mapped to UI feedback: non-rtc graph / missing
  db conn -> error notification; major schema-version mismatch -> prompt
  to download (remote higher) or branch-upload (local higher); lock
  failures and anything else are ignored."
  [repo & {:keys [stop-before-start?] :or {stop-before-start? true}}]
  (p/let [graph-uuid (state/<invoke-db-worker :thread-api/get-rtc-graph-uuid repo)]
    (if-not graph-uuid
      (log/info :skip-<rtc-start! ["graph-uuid not found" repo])
      (->
       (p/do!
        (js/Promise. user-handler/task--ensure-id&access-token)
        (state/<invoke-db-worker :thread-api/rtc-start stop-before-start?))
       (p/catch
        (fn [ex]
          (let [ex-data* (ex-data ex)]
            (case (:type ex-data*)
              (:rtc.exception/not-rtc-graph
               :rtc.exception/not-found-db-conn)
              (notification/show! (ex-message ex) :error)

              :rtc.exception/major-schema-version-mismatched
              (case (:sub-type ex-data*)
                :download
                (notification-download-higher-schema-graph! repo graph-uuid (:remote ex-data*))
                :create-branch
                (notification-upload-higher-schema-graph! repo)
                ;; else
                (do (log/info :start-ex ex)
                    (notification/show! [:div
                                         [:div (ex-message ex)]
                                         [:div (-> ex-data*
                                                   (select-keys [:app :local :remote])
                                                   pp/pprint
                                                   with-out-str)]]
                                        :error)))

              :rtc.exception/lock-failed nil
              ;; else
              nil))))))))
(defn <get-remote-graphs
  "Legacy RTC: fetch the user's remote graphs via the db worker, drop the
  ones still being deleted, reshape into the graph-map format
  (:url/:GraphName/:GraphUUID/...), store under :rtc/graphs, and refresh
  the repo list. Always clears :rtc/loading-graphs? when done."
  []
  (->
   (p/let [_ (state/set-state! :rtc/loading-graphs? true)
           _ (js/Promise. user-handler/task--ensure-id&access-token)
           token (state/get-auth-id-token)
           graphs (state/<invoke-db-worker :thread-api/rtc-get-graphs token)
           result (->> graphs
                       (remove (fn [graph] (= (:graph-status graph) "deleting")))
                       (mapv (fn [graph]
                               (merge
                                (let [url (str config/db-version-prefix (:graph-name graph))]
                                  {:url url
                                   :GraphName (:graph-name graph)
                                   :GraphSchemaVersion (:graph-schema-version graph)
                                   :GraphUUID (:graph-uuid graph)
                                   :rtc-graph? true})
                                ;; pass through any extra server fields
                                (dissoc graph :graph-uuid :graph-name)))))]
     (state/set-state! :rtc/graphs result)
     (repo-handler/refresh-repos!))
   (p/finally
     (fn []
       (state/set-state! :rtc/loading-graphs? false)))))
(defn <rtc-invite-email
  "Legacy RTC: grant `email` access to graph `graph-uuid` via the db
  worker and show a success notification. Silently returns nil when the
  user uuid or auth token is missing; request failures show an error
  notification and are logged to the console."
  [graph-uuid email]
  (let [token (state/get-auth-id-token)
        user-uuid (user-handler/user-uuid)]
    (when (and user-uuid token)
      (->
       (p/do!
        (state/<invoke-db-worker :thread-api/rtc-grant-graph-access
                                 token (str graph-uuid) user-uuid email)
        (notification/show! "Invitation sent!" :success))
       (p/catch (fn [e]
                  (notification/show! "Something wrong, please try again." :error)
                  (js/console.error e)))))))

View File

@@ -6,7 +6,6 @@
[frontend.db :as db]
[frontend.handler.db-based.rtc-flows :as rtc-flows]
[frontend.handler.db-based.sync :as rtc-handler]
[frontend.handler.notification :as notification]
[frontend.state :as state]
[lambdaisland.glogi :as log]
[logseq.common.util :as common-util]
@@ -32,23 +31,24 @@
(log/info :trying-to-restart-rtc graph-uuid :t (t/now))
(c.m/<? (rtc-handler/<rtc-start! (state/get-current-repo) :stop-before-start? false)))))))
(run-background-task-when-not-publishing
::notify-client-need-upgrade-when-larger-remote-schema-version-exists
(m/reduce
(constantly nil)
(m/ap
(let [{:keys [repo graph-uuid remote-schema-version sub-type]}
(m/?>
(m/eduction
(filter #(keyword-identical? :rtc.log/higher-remote-schema-version-exists (:type %)))
rtc-flows/rtc-log-flow))]
(case sub-type
:download
(rtc-handler/notification-download-higher-schema-graph! repo graph-uuid remote-schema-version)
(comment
(run-background-task-when-not-publishing
::notify-client-need-upgrade-when-larger-remote-schema-version-exists
(m/reduce
(constantly nil)
(m/ap
(let [{:keys [repo graph-uuid remote-schema-version sub-type]}
(m/?>
(m/eduction
(filter #(keyword-identical? :rtc.log/higher-remote-schema-version-exists (:type %)))
rtc-flows/rtc-log-flow))]
(case sub-type
:download
(rtc-handler/notification-download-higher-schema-graph! repo graph-uuid remote-schema-version)
;; else
(notification/show!
"The server has a graph with a higher schema version, the client may need to upgrade."
:warning))))))
(notification/show!
"The server has a graph with a higher schema version, the client may need to upgrade."
:warning)))))))
(run-background-task-when-not-publishing
;; stop rtc when [graph-switch user-logout]

View File

@@ -1,7 +1,6 @@
(ns frontend.handler.db-based.rtc-flows
"Flows related to RTC"
(:require [frontend.common.missionary :as c.m]
[frontend.common.thread-api :as thread-api :refer [def-thread-api]]
[frontend.flows :as flows]
[frontend.mobile.flows :as mobile-flows]
[frontend.state :as state]
@@ -87,10 +86,6 @@ conditions:
(assert (some? repo))
(reset! *rtc-start-trigger repo))
;; Let worker-side code request an RTC (re)start for `repo` on the main
;; thread by flipping the rtc-start trigger.
(def-thread-api :thread-api/rtc-start-request
  [repo]
  (trigger-rtc-start repo))
(def ^:private document-visible&rtc-not-running-flow
(m/ap
(let [visibility (m/?< flows/document-visibility-state-flow)]

View File

@@ -1,88 +1,405 @@
(ns frontend.handler.db-based.sync
"Dispatch RTC calls between legacy RTC and db-sync implementations."
(:require [frontend.config :as config]
[frontend.handler.db-based.db-sync :as db-sync-handler]
[frontend.handler.db-based.rtc :as rtc-handler]
"DB-sync handler based on Cloudflare Durable Objects."
(:require [clojure.string :as string]
[frontend.config :as config]
[frontend.db :as db]
[frontend.handler.notification :as notification]
[frontend.handler.repo :as repo-handler]
[frontend.handler.user :as user-handler]
[frontend.state :as state]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db-sync.malli-schema :as db-sync-schema]
[logseq.db.sqlite.util :as sqlite-util]
[promesa.core :as p]))
(defn- db-sync-enabled? []
config/db-sync-enabled?)
(defn- ws->http-base [ws-url]
(when (string? ws-url)
(let [base (cond
(string/starts-with? ws-url "wss://")
(str "https://" (subs ws-url (count "wss://")))
(defn <rtc-create-graph! [repo]
(if (db-sync-enabled?)
(db-sync-handler/<rtc-create-graph! repo)
(rtc-handler/<rtc-create-graph! repo)))
(string/starts-with? ws-url "ws://")
(str "http://" (subs ws-url (count "ws://")))
(defn <rtc-delete-graph! [graph-uuid schema-version]
(if (db-sync-enabled?)
(db-sync-handler/<rtc-delete-graph! graph-uuid schema-version)
(rtc-handler/<rtc-delete-graph! graph-uuid schema-version)))
:else ws-url)
base (string/replace base #"/sync/%s$" "")]
base)))
(defn <rtc-download-graph! [graph-name graph-uuid graph-schema-version timeout-ms]
(if (db-sync-enabled?)
(db-sync-handler/<rtc-download-graph! graph-name graph-uuid graph-schema-version)
(rtc-handler/<rtc-download-graph! graph-name graph-uuid graph-schema-version timeout-ms)))
(defn http-base []
(or config/db-sync-http-base
(ws->http-base config/db-sync-ws-url)))
(defn <rtc-stop! []
(if (db-sync-enabled?)
(db-sync-handler/<rtc-stop!)
(rtc-handler/<rtc-stop!)))
(def ^:private snapshot-text-decoder (js/TextDecoder.))
(defn- ->uint8 [data]
(cond
(instance? js/Uint8Array data) data
(instance? js/ArrayBuffer data) (js/Uint8Array. data)
(string? data) (.encode (js/TextEncoder.) data)
:else (js/Uint8Array. data)))
(defn- decode-snapshot-rows [payload]
(sqlite-util/read-transit-str (.decode snapshot-text-decoder (->uint8 payload))))
(defn- frame-len [^js data offset]
(let [view (js/DataView. (.-buffer data) offset 4)]
(.getUint32 view 0 false)))
(defn- concat-bytes
[^js a ^js b]
(cond
(nil? a) b
(nil? b) a
:else
(let [out (js/Uint8Array. (+ (.-byteLength a) (.-byteLength b)))]
(.set out a 0)
(.set out b (.-byteLength a))
out)))
(defn- parse-framed-chunk
[buffer chunk]
(let [data (concat-bytes buffer chunk)
total (.-byteLength data)]
(loop [offset 0
rows []]
(if (< (- total offset) 4)
{:rows rows
:buffer (when (< offset total)
(.slice data offset total))}
(let [len (frame-len data offset)
next-offset (+ offset 4 len)]
(if (<= next-offset total)
(let [payload (.slice data (+ offset 4) next-offset)
decoded (decode-snapshot-rows payload)]
(recur next-offset (into rows decoded)))
{:rows rows
:buffer (.slice data offset total)}))))))
(defn- finalize-framed-buffer
[buffer]
(if (or (nil? buffer) (zero? (.-byteLength buffer)))
[]
(let [{:keys [rows buffer]} (parse-framed-chunk nil buffer)]
(if (and (seq rows) (or (nil? buffer) (zero? (.-byteLength buffer))))
rows
(throw (ex-info "incomplete framed buffer" {:buffer buffer :rows rows}))))))
(defn- auth-headers []
(when-let [token (state/get-auth-id-token)]
{"authorization" (str "Bearer " token)}))
(defn- with-auth-headers [opts]
(if-let [auth (auth-headers)]
(assoc opts :headers (merge (or (:headers opts) {}) auth))
opts))
(declare fetch-json)
(declare coerce-http-response)
(defn fetch-json
[url opts {:keys [response-schema error-schema] :or {error-schema :error}}]
(p/let [resp (js/fetch url (clj->js (with-auth-headers opts)))
text (.text resp)
data (when (seq text) (js/JSON.parse text))]
(if (.-ok resp)
(let [body (js->clj data :keywordize-keys true)
body (if response-schema
(coerce-http-response response-schema body)
body)]
(if (or (nil? response-schema) body)
body
(throw (ex-info "db-sync invalid response"
{:status (.-status resp)
:url url
:body body}))))
(let [body (when data (js->clj data :keywordize-keys true))
body (if error-schema
(coerce-http-response error-schema body)
body)]
(throw (ex-info "db-sync request failed"
{:status (.-status resp)
:url url
:body body}))))))
(def ^:private invalid-coerce ::invalid-coerce)
(defn- coerce
[coercer value context]
(try
(coercer value)
(catch :default e
(log/error :db-sync/malli-coerce-failed (merge context {:error e :value value}))
invalid-coerce)))
(defn- coerce-http-request [schema-key body]
(if-let [coercer (get db-sync-schema/http-request-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :request})]
(when-not (= coerced invalid-coerce)
coerced))
body))
(defn- coerce-http-response [schema-key body]
(if-let [coercer (get db-sync-schema/http-response-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :response})]
(when-not (= coerced invalid-coerce)
coerced))
body))
(defn <rtc-start!
[repo & {:keys [_stop-before-start?] :as _opts}]
(log/info :db-sync/start {:repo repo})
(state/<invoke-db-worker :thread-api/db-sync-start repo))
(defn <rtc-stop!
[]
(log/info :db-sync/stop true)
(state/<invoke-db-worker :thread-api/db-sync-stop))
(defn <rtc-update-presence!
[editing-block-uuid]
(if (db-sync-enabled?)
(db-sync-handler/<rtc-update-presence! editing-block-uuid)
(rtc-handler/<rtc-update-presence! editing-block-uuid)))
(state/<invoke-db-worker :thread-api/db-sync-update-presence editing-block-uuid))
(defn notification-download-higher-schema-graph! [graph-name graph-uuid schema-version]
(rtc-handler/notification-download-higher-schema-graph! graph-name graph-uuid schema-version))
(defn <rtc-get-users-info
[]
(when-let [graph-uuid (ldb/get-graph-rtc-uuid (db/get-db))]
(let [base (http-base)
repo (state/get-current-repo)]
(if base
(p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
resp (fetch-json (str base "/graphs/" graph-uuid "/members")
{:method "GET"}
{:response-schema :graph-members/list})
members (:members resp)
users (mapv (fn [{:keys [user-id role email username]}]
(let [name (or username email user-id)
user-type (some-> role keyword)]
(cond-> {:user/uuid user-id
:user/name name
:graph<->user/user-type user-type}
(string? email) (assoc :user/email email))))
members)]
(state/set-state! :rtc/users-info {repo users}))
(p/resolved nil)))))
(defn <rtc-get-users-info []
(if (db-sync-enabled?)
(db-sync-handler/<rtc-get-users-info)
(rtc-handler/<rtc-get-users-info)))
(defn <rtc-create-graph!
  "Create a remote graph for `repo` on the sync server.
  On success transacts the graph kv markers (db-type, graph-uuid, e2ee flag)
  into the local db and resolves to the new graph-id string.
  Rejects when no http base is configured, when the request body fails
  coercion, or when the server response lacks a :graph-id."
  [repo]
  (let [schema-version (some-> (ldb/get-graph-schema-version (db/get-db)) :major str)
        base (http-base)]
    (if base
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
              ;; Strip the local db-version prefix to get the server-side graph name
              body (coerce-http-request :graphs/create
                                        {:graph-name (string/replace repo config/db-version-prefix "")
                                         :schema-version schema-version})
              result (if (nil? body)
                       (p/rejected (ex-info "db-sync invalid create-graph body"
                                            {:repo repo}))
                       (fetch-json (str base "/graphs")
                                   {:method "POST"
                                    :headers {"content-type" "application/json"}
                                    :body (js/JSON.stringify (clj->js body))}
                                   {:response-schema :graphs/create}))
              graph-id (:graph-id result)]
        (if graph-id
          (p/do!
           ;; Record graph identity locally; e2ee is enabled for all new graphs
           (ldb/transact! repo [(sqlite-util/kv :logseq.kv/db-type "db")
                                (sqlite-util/kv :logseq.kv/graph-uuid (uuid graph-id))
                                (sqlite-util/kv :logseq.kv/graph-rtc-e2ee? true)])
           graph-id)
          (p/rejected (ex-info "db-sync missing graph id in create response"
                               {:type :db-sync/invalid-graph
                                :response result}))))
      (p/rejected (ex-info "db-sync missing graph info"
                           {:type :db-sync/invalid-graph
                            :base base})))))
(defn <rtc-start!
  "Ask the db worker to start db-sync for `repo`.
  Keyword opts (e.g. :stop-before-start?) are accepted for call-site
  compatibility but are currently ignored.
  Replaces a stale legacy dispatcher that referenced the removed
  `rtc-handler` namespace."
  [repo & {:keys [_stop-before-start?] :as _opts}]
  (log/info :db-sync/start {:repo repo})
  (state/<invoke-db-worker :thread-api/db-sync-start repo))
(defn <rtc-delete-graph!
  "Delete the remote graph `graph-uuid` on the sync server.
  Rejects immediately when the graph id or the configured http base url is
  missing. `_schema-version` is a legacy argument kept for callers."
  [graph-uuid _schema-version]
  (let [base (http-base)]
    (if-not (and graph-uuid base)
      ;; Guard clause: nothing we can delete without both pieces of info.
      (p/rejected (ex-info "db-sync missing graph id"
                           {:type :db-sync/invalid-graph
                            :graph-uuid graph-uuid
                            :base base}))
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)]
        (fetch-json (str base "/graphs/" graph-uuid)
                    {:method "DELETE"}
                    {:response-schema :graphs/delete})))))
(defn <rtc-upload-graph!
  "Create a remote graph for `repo`, upload the local graph to it, then start
  db-sync. `_token`/`_remote-graph-name` are legacy rtc arguments kept only
  for call-site compatibility.
  Replaces a stale dispatcher whose else-branch invoked the removed legacy
  `:thread-api/rtc-async-upload-graph` worker api."
  [repo _token _remote-graph-name]
  (p/let [graph-id (<rtc-create-graph! repo)]
    (when (nil? graph-id)
      (throw (ex-info "graph id doesn't exist when uploading to server" {:repo repo})))
    (p/do!
     (state/<invoke-db-worker :thread-api/db-sync-upload-graph repo)
     (<rtc-start! repo))))
(defn <rtc-download-graph!
  "Download the remote snapshot of graph `graph-uuid` and import it locally.
  Flow: pull the latest remote tx, request a presigned snapshot url, stream
  the snapshot body, decode it with the framed-chunk parser, then hand the
  accumulated rows to the db worker for import. Publishes :rtc/log progress
  events and tracks :rtc/downloading-graph-uuid in app state for the UI.
  Rejects when the graph id or http base is missing; always clears the
  downloading flag, and best-effort DELETEs the snapshot url when done."
  [graph-name graph-uuid]
  (state/set-state! :rtc/downloading-graph-uuid graph-uuid)
  (let [base (http-base)]
    (-> (if (and graph-uuid base)
          (let [download-url* (atom nil)]
            (-> (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
                        graph (str config/db-version-prefix graph-name)
                        pull-resp (fetch-json (str base "/sync/" graph-uuid "/pull")
                                              {:method "GET"}
                                              {:response-schema :sync/pull})
                        remote-tx (:t pull-resp)
                        _ (when-not (integer? remote-tx)
                            (throw (ex-info "non-integer remote-tx when downloading graph"
                                            {:graph graph-name
                                             :remote-tx remote-tx})))
                        download-resp (fetch-json (str base "/sync/" graph-uuid "/snapshot/download")
                                                  {:method "GET"}
                                                  {:response-schema :sync/snapshot-download})
                        download-url (:url download-resp)
                        ;; Remember the url so the finally-branch can DELETE it
                        _ (reset! download-url* download-url)
                        _ (when-not (string? download-url)
                            (throw (ex-info "missing snapshot download url"
                                            {:graph graph-name
                                             :response download-resp})))
                        resp (js/fetch download-url (clj->js (with-auth-headers {:method "GET"})))
                        ;; content-length may be absent or non-numeric; nil means unknown size
                        total-bytes (when-let [raw (some-> resp .-headers (.get "content-length"))]
                                      (let [parsed (js/parseInt raw 10)]
                                        (when-not (js/isNaN parsed) parsed)))
                        _ (state/pub-event!
                           [:rtc/log {:type :rtc.log/download
                                      :sub-type :download-progress
                                      :graph-uuid graph-uuid
                                      :message (str "Start downloading graph snapshot, file size: " total-bytes)}])]
                  (when-not (.-ok resp)
                    (throw (ex-info "snapshot download failed"
                                    {:graph graph-name
                                     :status (.-status resp)})))
                  (when-not (.-body resp)
                    (throw (ex-info "snapshot download missing body"
                                    {:graph graph-name})))
                  ;; Stream the body chunk-by-chunk, carrying the parser's
                  ;; leftover buffer between reads.
                  (p/let [reader (.getReader (.-body resp))]
                    (p/loop [buffer nil
                             total 0
                             total-rows []
                             loaded 0]
                      (p/let [chunk (.read reader)]
                        (if (.-done chunk)
                          ;; Stream exhausted: flush the parser, then import
                          ;; everything in one worker call. Resolves to the
                          ;; total row count.
                          (let [rows (finalize-framed-buffer buffer)
                                total' (+ total (count rows))
                                total-rows' (into total-rows rows)]
                            (state/pub-event!
                             [:rtc/log {:type :rtc.log/download
                                        :sub-type :download-completed
                                        :graph-uuid graph-uuid
                                        :message "Graph snapshot downloaded"}])
                            (when (seq total-rows')
                              (state/<invoke-db-worker :thread-api/db-sync-import-kvs-rows
                                                       graph total-rows' true graph-uuid remote-tx))
                            total')
                          (let [value (.-value chunk)
                                loaded' (+ loaded (.-byteLength value))
                                {:keys [rows buffer]} (parse-framed-chunk buffer value)
                                total' (+ total (count rows))]
                            (p/recur buffer total' (into total-rows rows) loaded')))))))
                (p/finally
                  (fn []
                    ;; Best-effort: ask the server to invalidate the presigned url
                    (when-let [download-url @download-url*]
                      (js/fetch download-url (clj->js (with-auth-headers {:method "DELETE"}))))))))
          (p/rejected (ex-info "db-sync missing graph info"
                               {:type :db-sync/invalid-graph
                                :graph-uuid graph-uuid
                                :base base})))
        ;; NOTE(review): this catch just rethrows — it is a no-op and could be
        ;; removed or replaced with real error handling/logging.
        (p/catch (fn [error]
                   (throw error)))
        (p/finally
          (fn []
            (state/set-state! :rtc/downloading-graph-uuid nil))))))
;; Stale legacy rtc/db-sync dispatcher removed (it referenced the deleted
;; `rtc-handler` namespace and was shadowed anyway); the real implementation
;; is defined below.
(declare <get-remote-graphs)
(defn <get-remote-graphs
  "Fetch the user's remote graphs from the sync server.
  Normalizes each graph map to the legacy repo-list shape (:url, :GraphName,
  :GraphUUID, …), caches the result under :rtc/graphs, refreshes the repo
  list, and resolves to the normalized vector. Resolves to [] when no http
  base is configured. Toggles :rtc/loading-graphs? around the request."
  []
  (let [base (http-base)]
    (if-not base
      (p/resolved [])
      (-> (p/let [_ (state/set-state! :rtc/loading-graphs? true)
                  _ (js/Promise. user-handler/task--ensure-id&access-token)
                  resp (fetch-json (str base "/graphs")
                                   {:method "GET"}
                                   {:response-schema :graphs/list})
                  graphs (:graphs resp)
                  ;; Rename server fields to the keys the UI expects, keeping
                  ;; any extra server fields via the dissoc'd merge.
                  result (mapv (fn [graph]
                                 (merge
                                  {:url (str config/db-version-prefix (:graph-name graph))
                                   :GraphName (:graph-name graph)
                                   :GraphSchemaVersion (:schema-version graph)
                                   :GraphUUID (:graph-id graph)
                                   :rtc-graph? true
                                   :graph<->user-user-type (:role graph)
                                   :graph<->user-grant-by-user (:invited-by graph)}
                                  (dissoc graph :graph-id :graph-name :schema-version :role :invited-by)))
                               graphs)]
            (state/set-state! :rtc/graphs result)
            (repo-handler/refresh-repos!)
            result)
          (p/finally
            (fn []
              (state/set-state! :rtc/loading-graphs? false)))))))
;; Stale legacy rtc/db-sync dispatcher removed (it referenced the deleted
;; `rtc-handler` namespace and was shadowed anyway); the real implementation
;; is defined below.
(declare <rtc-invite-email)
(defn <rtc-invite-email
  "Invite `email` as a member of remote graph `graph-uuid`.
  For e2ee graphs, also asks the db worker to share the graph key with the
  invitee. Shows a success or error notification; rejects up-front when the
  http base, graph id, or email is missing."
  [graph-uuid email]
  (let [base (http-base)
        graph-uuid (str graph-uuid)]
    (if (and base (string? graph-uuid) (string? email))
      (->
       (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)
               body (coerce-http-request :graph-members/create
                                         {:email email
                                          :role "member"})
               _ (when (nil? body)
                   (throw (ex-info "db-sync invalid invite body"
                                   {:graph-uuid graph-uuid
                                    :email email})))
               _ (fetch-json (str base "/graphs/" graph-uuid "/members")
                             {:method "POST"
                              :headers {"content-type" "application/json"}
                              :body (js/JSON.stringify (clj->js body))}
                             {:response-schema :graph-members/create})
               repo (state/get-current-repo)
               e2ee? (ldb/get-graph-rtc-e2ee? (db/get-db))
               ;; e2ee graph: grant the new member access to the graph key
               _ (when (and repo e2ee?)
                   (state/<invoke-db-worker :thread-api/db-sync-grant-graph-access
                                            repo graph-uuid email))]
         (notification/show! "Invitation sent!" :success))
       (p/catch (fn [e]
                  (notification/show! "Something wrong, please try again." :error)
                  (log/error :db-sync/invite-email-failed
                             {:error e
                              :graph-uuid graph-uuid
                              :email email}))))
      (p/rejected (ex-info "db-sync missing invite info"
                           {:type :db-sync/invalid-invite
                            :graph-uuid graph-uuid
                            :email email
                            :base base})))))
(defn <rtc-remove-member!
  "Remove member `member-id` from remote graph `graph-uuid` via the sync
  server's DELETE endpoint. Rejects when the http base, graph id, or member
  id is missing.
  Reconstructed from merge residue: the previous text was a truncated legacy
  dispatcher followed by a dangling top-level `(let …)` form referencing
  unbound symbols (a compile error)."
  [graph-uuid member-id]
  (let [base (http-base)
        graph-uuid (some-> graph-uuid str)
        member-id (some-> member-id str)]
    (if (and base (string? graph-uuid) (string? member-id))
      (p/let [_ (js/Promise. user-handler/task--ensure-id&access-token)]
        (fetch-json (str base "/graphs/" graph-uuid "/members/" member-id)
                    {:method "DELETE"}
                    {:response-schema :graph-members/delete}))
      (p/rejected (ex-info "db-sync missing member info"
                           {:type :db-sync/invalid-member
                            :graph-uuid graph-uuid
                            :member-id member-id
                            :base base})))))
(defn <rtc-leave-graph!
  "Leave remote graph `graph-uuid` by removing the current user's own
  membership. Rejects when the current user id is unavailable.
  Reconstructed from merge residue: the previous text cut a legacy
  dispatcher off mid-map-literal (a compile error)."
  [graph-uuid]
  (if-let [member-id (user-handler/user-uuid)]
    (<rtc-remove-member! graph-uuid member-id)
    (p/rejected (ex-info "db-sync missing user id"
                         {:type :db-sync/invalid-member
                          :graph-uuid graph-uuid}))))
(defn <rtc-upload-graph!
  "Create a remote graph for `repo`, upload the local graph to it, then start
  db-sync. `_token`/`_remote-graph-name` are legacy rtc arguments kept only
  for call-site compatibility."
  [repo _token _remote-graph-name]
  (p/let [graph-id (<rtc-create-graph! repo)]
    (if (nil? graph-id)
      (throw (ex-info "graph id doesn't exist when uploading to server" {:repo repo}))
      (p/do!
       (state/<invoke-db-worker :thread-api/db-sync-upload-graph repo)
       (<rtc-start! repo)))))

View File

@@ -337,7 +337,7 @@
[:div (str "Downloading " graph-name " ...")]
(indicator/downloading-logs)])
{:id :download-rtc-graph}))
(rtc-handler/<rtc-download-graph! graph-name graph-uuid graph-schema-version 60000)
(rtc-handler/<rtc-download-graph! graph-name graph-uuid)
(rtc-handler/<get-remote-graphs)
(when (util/mobile?)
(shui/popup-hide! :download-rtc-graph)))

View File

@@ -7,7 +7,6 @@
[frontend.handler.assets :as assets-handler]
[frontend.handler.export.common :as export-common-handler]
[frontend.handler.notification :as notification]
[frontend.handler.user :as user-handler]
[frontend.idb :as idb]
[frontend.persist-db :as persist-db]
[frontend.state :as state]
@@ -90,24 +89,6 @@
(p/catch (fn [error]
(js/console.error error)))))
(defn export-repo-as-debug-log-sqlite!
  "Export `repo`'s debug-log sqlite database as a browser download.
  Limited to logged-in team members; otherwise shows a warning notification."
  [repo]
  (if-not (and (state/get-auth-id-token) (user-handler/rtc-group?))
    (notification/show! "Debug log export is limited to team members." :warning)
    (->
     (p/let [data (state/<invoke-db-worker-direct-pass :thread-api/export-debug-log-db repo)]
       (if-not data
         (notification/show! "Debug log db is not available for this graph." :warning)
         ;; Wrap the bytes in a Blob and click a synthetic anchor to trigger
         ;; the browser download dialog.
         (let [filename (file-name (str repo "_debug-log") "sqlite")
               url (js/URL.createObjectURL (js/Blob. #js [data]))
               anchor (.createElement js/document "a")]
           (set! (.-href anchor) url)
           (set! (.-download anchor) filename)
           (.click anchor)
           ;; Release the object URL so the blob can be garbage-collected
           (js/URL.revokeObjectURL url))))
     (p/catch (fn [error]
                (js/console.error error))))))
;;;;;;;;;;;;;;;;;;;;;;;;;
;; Export to roam json ;;
;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -16,6 +16,7 @@
[goog.crypt :as crypt]
[goog.crypt.Hmac]
[goog.crypt.Sha256]
[goog.crypt.base64 :as base64]
[missionary.core :as m]))
;;; userinfo, token, login/logout, ...
@@ -31,7 +32,7 @@
(some-> jwt
(string/split ".")
second
(#(.decodeString ^js crypt/base64 % true))
(#(base64/decodeString % true))
js/JSON.parse
(js->clj :keywordize-keys true)
(update :cognito:username decode-username)))
@@ -212,7 +213,7 @@
key (.encode text-encoder client-secret)
hasher (new crypt/Sha256)
hmacer (new crypt/Hmac hasher key)
secret-hash (.encodeByteArray ^js crypt/base64 (.getHmac hmacer (str username' client-id)))
secret-hash (base64/encodeByteArray (.getHmac hmacer (str username' client-id)))
payload {"AuthParameters" {"USERNAME" username',
"PASSWORD" password,
"SECRET_HASH" secret-hash}

View File

@@ -139,9 +139,9 @@
(Comlink/expose #js{"remoteInvoke" thread-api/remote-function} worker)
(worker-handler/handle-message! worker wrapped-worker)
(reset! state/*db-worker wrapped-worker)
(-> (p/let [_ (state/<invoke-db-worker :thread-api/init config/RTC-WS-URL)
(-> (p/let [_ (state/<invoke-db-worker :thread-api/init)
_ (state/<invoke-db-worker :thread-api/set-db-sync-config
{:enabled? config/db-sync-enabled?
{:enabled? true
:ws-url config/db-sync-ws-url
:http-base config/db-sync-http-base})
_ (sync-app-state!)

View File

@@ -3,12 +3,11 @@
(:require [clojure.string :as string]
[datascript.core :as d]
[frontend.common.thread-api :as thread-api]
[frontend.worker.db-sync :as db-sync]
[frontend.worker.pipeline :as worker-pipeline]
[frontend.worker.rtc.gen-client-op :as gen-client-op]
[frontend.worker.search :as search]
[frontend.worker.shared-service :as shared-service]
[frontend.worker.state :as worker-state]
[frontend.worker.sync :as db-sync]
[logseq.common.util :as common-util]
[logseq.db :as ldb]
[logseq.outliner.batch-tx :as batch-tx]
@@ -86,8 +85,7 @@
(let [*batch-all-txs (volatile! [])
get-batch-txs #(->> @*batch-all-txs
(sort-by :tx)
(common-util/distinct-by-last-wins (fn [[e a v _tx added]] [e a v added])))
additional-args gen-client-op/group-datoms-by-entity]
(common-util/distinct-by-last-wins (fn [[e a v _tx added]] [e a v added])))]
(d/listen! conn ::listen-db-changes!
(fn listen-db-changes!-inner
[{:keys [tx-data _db-before _db-after tx-meta] :as tx-report}]
@@ -116,7 +114,7 @@
tx-report' (if sync-db-to-main-thread?
(sync-db-to-main-thread repo conn tx-report)
tx-report)
opt (assoc (additional-args (:tx-data tx-report')) :repo repo)]
opt {:repo repo}]
(doseq [[k handler-fn] handlers]
(handler-fn k opt tx-report'))))
@@ -125,6 +123,6 @@
(let [tx-report' (if sync-db-to-main-thread?
(sync-db-to-main-thread repo conn tx-report)
tx-report)
opt (assoc (additional-args (:tx-data tx-report')) :repo repo)]
opt {:repo repo}]
(doseq [[k handler-fn] handlers]
(handler-fn k opt tx-report'))))))))))

View File

@@ -1,17 +0,0 @@
(ns frontend.worker.db-metadata
  "Fns to read/write metadata.edn file for db-based."
  (:require ["/frontend/idbkv" :as idb-keyval]))
;; Lazily-created IndexedDB store shared by all repos (localforage-compatible).
(defonce ^:private store (delay (idb-keyval/newStore "localforage" "keyvaluepairs" 2)))
(defn- gen-key
  "Build the per-repo IndexedDB key for the metadata entry."
  [repo]
  (str "metadata###" repo))
(defn <store
  "Persist `metadata-str` (an edn string) for `repo`. Returns a promise."
  [repo metadata-str]
  (idb-keyval/set (gen-key repo) metadata-str @store))
(defn <get
  "Read the stored metadata string for `repo`. Returns a promise."
  [repo]
  (idb-keyval/get (gen-key repo) @store))

View File

@@ -4,7 +4,6 @@
["comlink" :as Comlink]
[cljs-bean.core :as bean]
[cljs.cache :as cache]
[clojure.edn :as edn]
[clojure.set]
[clojure.string :as string]
[datascript.core :as d]
@@ -15,8 +14,6 @@
[frontend.common.thread-api :as thread-api :refer [def-thread-api]]
[frontend.worker-common.util :as worker-util]
[frontend.worker.db-listener :as db-listener]
[frontend.worker.db-metadata :as worker-db-metadata]
[frontend.worker.db-sync :as db-sync]
[frontend.worker.db.fix :as db-fix]
[frontend.worker.db.migrate :as db-migrate]
[frontend.worker.db.validate :as worker-db-validate]
@@ -25,16 +22,14 @@
[frontend.worker.handler.page :as worker-page]
[frontend.worker.pipeline :as worker-pipeline]
[frontend.worker.publish]
[frontend.worker.rtc.asset-db-listener]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.core :as rtc.core]
[frontend.worker.rtc.db-listener]
[frontend.worker.rtc.debug-log :as rtc-debug-log]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.rtc.migrate :as rtc-migrate]
[frontend.worker.search :as search]
[frontend.worker.shared-service :as shared-service]
[frontend.worker.state :as worker-state]
[frontend.worker.sync :as db-sync]
[frontend.worker.sync.asset-db-listener]
[frontend.worker.sync.client-op :as client-op]
[frontend.worker.sync.crypt :as sync-crypt]
[frontend.worker.sync.log-and-state :as rtc-log-and-state]
[frontend.worker.thread-atom]
[frontend.worker.undo-redo :as undo-validate]
[goog.object :as gobj]
@@ -100,7 +95,6 @@
nil)))
(def repo-path "/db.sqlite")
(def debug-log-path "/debug-log/db.sqlite")
(defn- <export-db-file
([repo]
@@ -186,28 +180,27 @@
(restore-data-from-addr db addr))))
(defn- close-db-aux!
[repo ^Object db ^Object search ^Object client-ops ^Object debug-log]
[repo ^Object db ^Object search ^Object client-ops]
(swap! *sqlite-conns dissoc repo)
(swap! *datascript-conns dissoc repo)
(swap! *client-ops-conns dissoc repo)
(when db (.close db))
(when search (.close search))
(when client-ops (.close client-ops))
(when debug-log (.close debug-log))
(when-let [^js pool (worker-state/get-opfs-pool repo)]
(.pauseVfs pool))
(swap! *opfs-pools dissoc repo))
(defn- close-other-dbs!
[repo]
(doseq [[r {:keys [db search client-ops debug-log]}] @*sqlite-conns]
(doseq [[r {:keys [db search client-ops]}] @*sqlite-conns]
(when-not (= repo r)
(close-db-aux! r db search client-ops debug-log))))
(close-db-aux! r db search client-ops))))
(defn close-db!
[repo]
(let [{:keys [db search client-ops debug-log]} (get @*sqlite-conns repo)]
(close-db-aux! repo db search client-ops debug-log)))
(let [{:keys [db search client-ops]} (get @*sqlite-conns repo)]
(close-db-aux! repo db search client-ops)))
(defn reset-db!
[repo db-transit-str]
@@ -232,13 +225,12 @@
(.unpauseVfs pool))
db (new (.-OpfsSAHPoolDb pool) repo-path)
search-db (new (.-OpfsSAHPoolDb pool) (str "search" repo-path))
client-ops-db (new (.-OpfsSAHPoolDb pool) (str "client-ops-" repo-path))
debug-log-db (new (.-OpfsSAHPoolDb pool) (str "debug-log" repo-path))]
[db search-db client-ops-db debug-log-db])))
client-ops-db (new (.-OpfsSAHPoolDb pool) (str "client-ops-" repo-path))]
[db search-db client-ops-db])))
(defn- gc-sqlite-dbs!
"Gc main db weekly and rtc ops db each time when opening it"
[sqlite-db client-ops-db debug-log-db datascript-conn {:keys [full-gc?]}]
[sqlite-db client-ops-db datascript-conn {:keys [full-gc?]}]
(let [last-gc-at (:kv/value (d/entity @datascript-conn :logseq.kv/graph-last-gc-at))]
(when (or full-gc?
(nil? last-gc-at)
@@ -248,27 +240,24 @@
(doseq [db (if @*publishing? [sqlite-db] [sqlite-db client-ops-db])]
(sqlite-gc/gc-kvs-table! db {:full-gc? full-gc?})
(.exec db "VACUUM"))
(rtc-debug-log/gc! debug-log-db)
(ldb/transact! datascript-conn [{:db/ident :logseq.kv/graph-last-gc-at
:kv/value (common-util/time-ms)}]))))
(defn- <create-or-open-db!
[repo {:keys [config datoms] :as opts}]
(when-not (worker-state/get-sqlite-conn repo)
(p/let [[db search-db client-ops-db debug-log-db :as dbs] (get-dbs repo)
(p/let [[db search-db client-ops-db :as dbs] (get-dbs repo)
storage (new-sqlite-storage db)
client-ops-storage (when-not @*publishing?
(new-sqlite-storage client-ops-db))
db-based? true]
(swap! *sqlite-conns assoc repo {:db db
:search search-db
:client-ops client-ops-db
:debug-log debug-log-db})
:client-ops client-ops-db})
(doseq [db' dbs]
(enable-sqlite-wal-mode! db'))
(common-sqlite/create-kvs-table! db)
(when-not @*publishing? (common-sqlite/create-kvs-table! client-ops-db))
(rtc-debug-log/create-tables! debug-log-db)
(search/create-tables-and-triggers! search-db)
(ldb/register-transact-pipeline-fn! worker-pipeline/transact-pipeline)
(let [conn (common-sqlite/get-storage-conn storage db-schema/schema)
@@ -303,12 +292,9 @@
initial-data (sqlite-create-graph/build-db-initial-data
config (select-keys opts [:import-type :graph-git-sha]))]
(ldb/transact! conn initial-data {:initial-db? true})))]
(gc-sqlite-dbs! db client-ops-db debug-log-db conn {})
(db-migrate/migrate conn)
(let [migration-result (db-migrate/migrate conn)]
(when (client-op/rtc-db-graph? repo)
(let [client-ops (rtc-migrate/migration-results=>client-ops migration-result)]
(client-op/add-ops! repo client-ops))))
(gc-sqlite-dbs! db client-ops-db conn {})
(when initial-tx-report
(db-sync/handle-local-tx! repo initial-tx-report))
@@ -360,11 +346,8 @@
(string/replace-first ".logseq-pool-" "")
;; TODO: DRY
(string/replace "+3A+" ":")
(string/replace "++" "/"))
repo (str sqlite-util/db-version-prefix graph-name)
metadata (worker-db-metadata/<get repo)]
{:name graph-name
:metadata (edn/read-string metadata)})) db-dirs)))))
(string/replace "++" "/"))]
{:name graph-name})) db-dirs)))))
(def-thread-api :thread-api/list-db
[]
@@ -396,8 +379,7 @@
(.-version sqlite))))
(def-thread-api :thread-api/init
[rtc-ws-url]
(reset! worker-state/*rtc-ws-url rtc-ws-url)
[]
(init-sqlite-module!))
(def-thread-api :thread-api/set-db-sync-config
@@ -423,11 +405,11 @@
(def-thread-api :thread-api/db-sync-grant-graph-access
[repo graph-id target-email]
(db-sync/<grant-graph-access! repo graph-id target-email))
(sync-crypt/<grant-graph-access! repo graph-id target-email))
(def-thread-api :thread-api/db-sync-ensure-user-rsa-keys
[]
(db-sync/ensure-user-rsa-keys!))
(sync-crypt/ensure-user-rsa-keys!))
(def-thread-api :thread-api/db-sync-upload-graph
[repo]
@@ -643,7 +625,7 @@
(def-thread-api :thread-api/db-sync-import-kvs-rows
[repo rows reset? graph-id remote-tx]
(p/let [_ (when reset? (close-db! repo))
aes-key (db-sync/<fetch-graph-aes-key-for-download repo graph-id)
aes-key (sync-crypt/<fetch-graph-aes-key-for-download repo graph-id)
_ (when (nil? aes-key)
(db-sync/fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))
db (ensure-db-sync-import-db! repo reset?)
@@ -654,7 +636,7 @@
:message "Start decrypting data"})
;; sequential batches: low memory
(p/doseq [[i batch] batches]
(p/let [dec-rows (db-sync/<decrypt-snapshot-rows-batch aes-key batch)]
(p/let [dec-rows (sync-crypt/<decrypt-snapshot-rows-batch aes-key batch)]
(upsert-addr-content! db (rows->sqlite-binds dec-rows))
(rtc-log-and-state/rtc-log :rtc.log/download
{:sub-type :download-progress
@@ -683,23 +665,6 @@
(p/let [data (<export-db-file repo)]
(Comlink/transfer data #js [(.-buffer data)])))
(def-thread-api :thread-api/export-debug-log-db
[repo]
(when-let [^js db (worker-state/get-sqlite-conn repo :debug-log)]
(.exec db "PRAGMA wal_checkpoint(2)"))
(-> (p/let [data (<export-db-file
repo
debug-log-path)]
(when data
(Comlink/transfer data #js [(.-buffer data)])))
(p/catch (fn [error]
(throw error)))))
(def-thread-api :thread-api/reset-debug-log-db
[repo]
(when-let [^js db (worker-state/get-sqlite-conn repo :debug-log)]
(rtc-debug-log/reset-tables! db)))
(def-thread-api :thread-api/import-db
[repo data]
(when-not (string/blank? repo)
@@ -842,10 +807,10 @@
(def-thread-api :thread-api/gc-graph
[repo]
(let [{:keys [db client-ops debug-log]} (get @*sqlite-conns repo)
(let [{:keys [db client-ops]} (get @*sqlite-conns repo)
conn (get @*datascript-conns repo)]
(when (and db conn)
(gc-sqlite-dbs! db client-ops debug-log conn {:full-gc? true})
(gc-sqlite-dbs! db client-ops conn {:full-gc? true})
nil)))
(def-thread-api :thread-api/vec-search-embedding-model-info
@@ -955,10 +920,7 @@
(when-not (:import-type start-opts)
(c.m/<? (start-db! repo start-opts))
(assert (some? (worker-state/get-datascript-conn repo))))
;; Don't wait for rtc started because the app will be slow to be ready
;; for users.
(when @worker-state/*rtc-ws-url
(rtc.core/new-task--rtc-start true)))))
nil)))
(def broadcast-data-types
(set (map

View File

@@ -1,11 +0,0 @@
(ns frontend.worker.flows
  "common flows in worker thread"
  (:require [frontend.worker.state :as worker-state]
            [missionary.core :as m]))
;; Flow that emits only when the :thread-atom/online-event atom turns true,
;; i.e. when the worker (re)gains connectivity.
(def online-event-flow
  (->> (m/watch (get @worker-state/*state :thread-atom/online-event))
       (m/eduction (filter true?))))
(comment
  ((m/reduce (fn [_ x] (prn :xxx x)) online-event-flow) prn prn))

View File

@@ -1,24 +1,6 @@
(ns frontend.worker.handler.page
"Page operations"
(:require [logseq.common.util :as common-util]
[logseq.db :as ldb]
[logseq.graph-parser.block :as gp-block]
[logseq.outliner.page :as outliner-page]))
(defn rtc-create-page!
[conn title date-formatter {:keys [uuid old-db-id]}]
(assert (uuid? uuid) (str "rtc-create-page! `uuid` is not a uuid " uuid))
(let [title (outliner-page/sanitize-title title)
page-name (common-util/page-name-sanity-lc title)
page (cond-> (gp-block/page-name->map title @conn true date-formatter
{:page-uuid uuid
:skip-existing-page-check? true})
old-db-id
(assoc :db/id old-db-id))
result (ldb/transact! conn [page] {:persist-op? false
:outliner-op :create-page
:rtc-op? true})]
[result page-name (:block/uuid page)]))
(:require [logseq.outliner.page :as outliner-page]))
(defn create!
"Create page. Has the following options:

View File

@@ -1,360 +0,0 @@
(ns frontend.worker.rtc.asset
"Fns to sync assets.
some notes:
- has :logseq.property.asset/type, :logseq.property.asset/size, :logseq.property.asset/checksum
- block/title, store the asset name
- an asset-block not having :logseq.property.asset/remote-metadata
indicates need to upload the asset to server"
(:require [clojure.set :as set]
[datascript.core :as d]
[frontend.common.crypt :as crypt]
[frontend.common.missionary :as c.m]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.exception :as r.ex]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.rtc.ws-util :as ws-util]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq.common.path :as path]
[logseq.db :as ldb]
[malli.core :as ma]
[missionary.core :as m]))
(defonce ^:private max-asset-size (* 100 1024 1024))
(defn- create-local-updates-check-flow
"Return a flow that emits value if need to push local-updates"
[repo *auto-push? interval-ms]
(let [auto-push-flow (m/watch *auto-push?)
clock-flow (c.m/clock interval-ms :clock)
merge-flow (m/latest vector auto-push-flow clock-flow)]
(m/eduction (filter first)
(map second)
(filter (fn [v] (when (pos? (client-op/get-unpushed-asset-ops-count repo)) v)))
merge-flow)))
(def ^:private remote-asset-updates-schema
[:sequential
[:map {:closed true}
[:op [:enum :update-asset :remove-asset]]
[:block/uuid :uuid]
[:malli.core/default [:map-of :keyword :any]]]])
(def ^:private *remote-asset-updates (atom nil :validator (ma/validator remote-asset-updates-schema)))
(def ^:private remote-asset-updates-flow (m/buffer 10 (m/watch *remote-asset-updates)))
(comment
(def cancel ((m/reduce (fn [_ v] (prn :v v)) remote-asset-updates-flow) prn prn)))
(defn- new-task--get-asset-file-metadata
"Return nil if this asset not exist"
[repo block-uuid asset-type]
(m/sp
(c.m/<?
(worker-state/<invoke-main-thread :thread-api/get-asset-file-metadata
repo (str block-uuid) asset-type))))
(defn- remote-block-ops=>remote-asset-ops
[db-before db-after remove-ops update-ops]
(concat
(keep
(fn [remove-op]
(let [block-uuid (:block-uuid remove-op)]
(when-let [ent (d/entity db-before [:block/uuid block-uuid])]
(when-let [asset-type (:logseq.property.asset/type ent)]
{:op :remove-asset
:block/uuid block-uuid
:logseq.property.asset/type asset-type}))))
remove-ops)
(keep
(fn [update-op]
(let [block-uuid (:self update-op)]
(when-let [ent (d/entity db-after [:block/uuid block-uuid])]
(let [remote-metadata (:logseq.property.asset/remote-metadata ent)
checksum (:logseq.property.asset/checksum ent)
asset-type (:logseq.property.asset/type ent)]
(when (and remote-metadata checksum asset-type)
{:op :update-asset
:block/uuid block-uuid})))))
update-ops)))
(defn emit-remote-asset-updates-from-block-ops
[db-before db-after remove-ops update-ops]
(when-let [asset-update-ops
(not-empty (remote-block-ops=>remote-asset-ops db-before db-after remove-ops update-ops))]
(reset! *remote-asset-updates asset-update-ops)))
(defn- create-mixed-flow
"Return a flow that emits different events:
- `:local-update-check`: event to notify check if there're some new local-updates on assets
- `:remote-updates`: remote asset updates "
[repo *auto-push?]
(let [remote-update-flow (m/eduction
(map (fn [v] {:type :remote-updates :value v}))
remote-asset-updates-flow)
local-update-check-flow (m/eduction
(map (fn [v] {:type :local-update-check :value v}))
(create-local-updates-check-flow repo *auto-push? 2500))]
(c.m/mix remote-update-flow local-update-check-flow)))
(defonce ^:private *assets-sync-lock (atom nil))
(defn- holding-assets-sync-lock
"Use this to prevent multiple assets-sync loops at same time."
[started-dfv task]
(m/sp
(when-not (compare-and-set! *assets-sync-lock nil true)
(let [e (ex-info "Must not run multiple assets-sync loops"
{:type :assets-sync.exception/lock-failed
:missionary/retry true})]
(started-dfv e)
(throw e)))
(try
(m/? task)
(finally
(reset! *assets-sync-lock nil)))))
(defn- clean-asset-ops!
[repo all-asset-uuids handled-asset-uuids]
(doseq [asset-uuid (set/difference (set all-asset-uuids) (set handled-asset-uuids))]
(client-op/remove-asset-op repo asset-uuid)))
(defn- new-task--concurrent-download-assets
"Concurrently download assets with limited max concurrent count"
[repo aes-key asset-uuid->url asset-uuid->asset-type]
(m/sp
(let [exported-aes-key (when aes-key (c.m/<? (crypt/<export-aes-key aes-key)))]
(m/?
(->> (fn [[asset-uuid url]]
(m/sp
(try
(c.m/<?
(worker-state/<invoke-main-thread :thread-api/rtc-download-asset
repo exported-aes-key (str asset-uuid)
(get asset-uuid->asset-type asset-uuid) url))
(catch :default e
(when-let [edata (ex-data e)]
;; if download-url return 404, ignore this asset
(when (not= 404 (:status (:data edata)))
(log/error :rtc/asset-download-failed
{:repo repo
:asset-uuid asset-uuid
:error e}))) ()))))
(c.m/concurrent-exec-flow 5 (m/seed asset-uuid->url))
(m/reduce (constantly nil)))))))
(defn- new-task--concurrent-upload-assets
"Concurrently upload assets with limited max concurrent count"
[repo conn aes-key asset-uuid->url asset-uuid->asset-metadata]
(m/sp
(let [exported-aes-key (when aes-key (c.m/<? (crypt/<export-aes-key aes-key)))]
(m/?
(->> (fn [[asset-uuid url]]
(m/sp
(let [[asset-type checksum] (get asset-uuid->asset-metadata asset-uuid)
r (try
(c.m/<?
(worker-state/<invoke-main-thread :thread-api/rtc-upload-asset
repo exported-aes-key (str asset-uuid)
asset-type checksum url))
nil
(catch :default e e))]
(case (:type (ex-data r))
:rtc.exception/read-asset-failed ;asset not found, ignore
(client-op/remove-asset-op repo asset-uuid)
:rtc.exception/upload-asset-failed ;upload to remote failed, maybe try later
nil
;; else
(do
;; asset might be deleted by the user before uploaded successfully
(when (d/entity @conn [:block/uuid asset-uuid])
(ldb/transact!
conn
[{:block/uuid asset-uuid
:logseq.property.asset/remote-metadata {:checksum checksum :type asset-type}}]
;; Don't generate rtc ops again, (block-ops & asset-ops)
{:persist-op? false}))
(client-op/remove-asset-op repo asset-uuid))))))
(c.m/concurrent-exec-flow 3 (m/seed asset-uuid->url))
(m/reduce (constantly nil)))))))
(defn- new-task--push-local-asset-updates
[repo get-ws-create-task conn graph-uuid major-schema-version aes-key add-log-fn]
(m/sp
(when-let [asset-ops (not-empty (client-op/get-all-asset-ops repo))]
(let [upload-asset-uuids (keep
(fn [asset-op]
(when (contains? asset-op :update-asset)
(:block/uuid asset-op)))
asset-ops)
remove-asset-uuids (keep
(fn [asset-op]
(when (contains? asset-op :remove-asset)
(:block/uuid asset-op)))
asset-ops)
asset-uuid->asset-metadata
(into {}
(keep
(fn [asset-uuid]
(let [ent (d/entity @conn [:block/uuid asset-uuid])]
(when-let [tp (:logseq.property.asset/type ent)]
(when-let [checksum (:logseq.property.asset/checksum ent)]
(let [size (:logseq.property.asset/size ent 0)]
(if (> size max-asset-size)
(do (add-log-fn :rtc.asset.log/asset-too-large
{:asset-uuid asset-uuid
:asset-name (:block/title ent)
:size size})
nil)
[asset-uuid [tp checksum]])))))))
upload-asset-uuids)
asset-uuid->url
(when (seq asset-uuid->asset-metadata)
(->> (m/? (ws-util/send&recv get-ws-create-task
{:action "get-assets-upload-urls"
:graph-uuid graph-uuid
:asset-uuid->metadata
(into {}
(map (fn [[asset-uuid [asset-type checksum]]]
[asset-uuid {"checksum" checksum "type" asset-type}]))
asset-uuid->asset-metadata)}))
:asset-uuid->url))]
(when (seq asset-uuid->url)
(add-log-fn :rtc.asset.log/upload-assets {:asset-uuids (keys asset-uuid->url)}))
(m/? (new-task--concurrent-upload-assets repo conn aes-key asset-uuid->url asset-uuid->asset-metadata))
(when (seq remove-asset-uuids)
(add-log-fn :rtc.asset.log/remove-assets {:asset-uuids remove-asset-uuids})
(m/? (ws-util/send&recv get-ws-create-task
{:action "delete-assets"
:graph-uuid graph-uuid
:schema-version (str major-schema-version)
:asset-uuids remove-asset-uuids}))
(doseq [asset-uuid remove-asset-uuids]
(client-op/remove-asset-op repo asset-uuid)))
(clean-asset-ops! repo
(map :block/uuid asset-ops)
(concat (keys asset-uuid->url) remove-asset-uuids))))))
(defn- new-task--pull-remote-asset-updates
[repo get-ws-create-task conn graph-uuid aes-key add-log-fn asset-update-ops]
(m/sp
(when (seq asset-update-ops)
(let [update-asset-uuids (keep (fn [op]
(when (= :update-asset (:op op))
(:block/uuid op)))
asset-update-ops)
remove-asset-uuid->asset-type
(into {} (keep (fn [op]
(when (= :remove-asset (:op op))
[(:block/uuid op) (:logseq.property.asset/type op)])))
asset-update-ops)
asset-uuid->asset-type (into {}
(keep (fn [asset-uuid]
(when-let [ent (d/entity @conn [:block/uuid asset-uuid])]
(let [asset-type (:logseq.property.asset/type ent)]
[asset-uuid asset-type]))))
update-asset-uuids)
asset-uuid->url
(when-let [asset-uuids
(->> asset-uuid->asset-type
(map
(fn [[asset-uuid asset-type]]
(m/sp
(when (nil? (m/? (new-task--get-asset-file-metadata repo asset-uuid asset-type)))
asset-uuid))))
(apply m/join vector)
m/?
(remove nil?))]
(->> (m/? (ws-util/send&recv get-ws-create-task
{:action "get-assets-download-urls"
:graph-uuid graph-uuid
:asset-uuids asset-uuids}))
:asset-uuid->url))]
(doseq [[asset-uuid asset-type] remove-asset-uuid->asset-type]
(c.m/<? (worker-state/<invoke-main-thread :thread-api/unlink-asset
repo (str asset-uuid) asset-type)))
(when (seq asset-uuid->url)
(add-log-fn :rtc.asset.log/download-assets {:asset-uuids (keys asset-uuid->url)}))
(m/? (new-task--concurrent-download-assets repo aes-key asset-uuid->url asset-uuid->asset-type))))))
(defn- get-all-asset-blocks
[db]
(d/q '[:find [(pull ?b [:block/uuid
:logseq.property.asset/type
:logseq.property.asset/size
:logseq.property.asset/checksum
:logseq.property.asset/remote-metadata])
...]
:where
[?b :block/uuid]
[?b :logseq.property.asset/type]]
db))
(defn- new-task--initial-download-missing-assets
[repo get-ws-create-task graph-uuid conn aes-key add-log-fn]
(m/sp
(let [local-all-asset-file-paths
(c.m/<? (worker-state/<invoke-main-thread :thread-api/get-all-asset-file-paths repo))
local-all-asset-file-uuids (set (map (comp parse-uuid path/file-stem) local-all-asset-file-paths))
local-all-asset-uuids (into
#{}
;; Only if the asset-block contains :logseq.property.asset/remote-metadata
;; does the asset exist remotely.
(comp (filter :logseq.property.asset/remote-metadata)
(map :block/uuid))
(get-all-asset-blocks @conn))]
(when-let [asset-update-ops
(not-empty
(map (fn [asset-uuid] {:op :update-asset :block/uuid asset-uuid})
(set/difference local-all-asset-uuids local-all-asset-file-uuids)))]
(add-log-fn :rtc.asset.log/initial-download-missing-assets {:count (count asset-update-ops)})
(m/? (new-task--pull-remote-asset-updates
repo get-ws-create-task conn graph-uuid aes-key add-log-fn asset-update-ops))))))
(defn create-assets-sync-loop
[repo get-ws-create-task graph-uuid major-schema-version conn *auto-push? *aes-key]
(let [started-dfv (m/dfv)
add-log-fn (fn [type message]
(assert (map? message) message)
(rtc-log-and-state/rtc-log type (assoc message :graph-uuid graph-uuid)))
mixed-flow (create-mixed-flow repo *auto-push?)]
{:onstarted-task started-dfv
:assets-sync-loop-task
(holding-assets-sync-lock
started-dfv
(m/sp
(try
(log/info :rtc-asset :loop-starting)
;; check aes-key exists
(when (ldb/get-graph-rtc-e2ee? @conn) (assert @*aes-key))
(started-dfv true)
(m/? (new-task--initial-download-missing-assets
repo get-ws-create-task graph-uuid conn @*aes-key add-log-fn))
(->>
(let [event (m/?> mixed-flow)]
(case (:type event)
:remote-updates
(when-let [asset-update-ops (not-empty (:value event))]
(m/? (new-task--pull-remote-asset-updates
repo get-ws-create-task conn graph-uuid @*aes-key add-log-fn asset-update-ops)))
:local-update-check
(m/? (new-task--push-local-asset-updates
repo get-ws-create-task conn graph-uuid major-schema-version @*aes-key add-log-fn))))
m/ap
(m/reduce {} nil)
m/?)
(catch :default e
(let [ex (r.ex/e->ex-info e)]
(add-log-fn :rtc.asset.log/cancelled {:e ex})
(throw ex))))))}))
(comment
(def x (atom 1))
(def f (m/ap
(let [r (m/?> (m/buffer 10 (m/watch x)))]
(m/? (m/sleep 2000))
r)))
(def cancel ((m/reduce (fn [r e] (prn :e e)) f) prn prn)))

View File

@@ -1,37 +0,0 @@
(ns frontend.worker.rtc.branch-graph
"Fns to migrate rtc graphs when client-graph-schema and server-graph-schema not matching
* when to upload/download to/from remote graph?
suppose we have client-schema=X and server-schema=Y.
there're several different schema-version graphs on server at the same time.
- if X = Y, nothing need to do with migration
- if X > Y, client-graph is newer than server-graph, we need to upload this client-graph
- if X < Y, client-app need to upgrade, otherwise, this client will keep rtc with server-graph-X
- if X < Y, and client-app upgraded, now it should download the server-graph-Y"
(:require [logseq.db.frontend.schema :as db-schema]))
(defn compare-schemas
  "Return one of [:create-branch :download nil].
  Compares the MAJOR versions of server-graph, client-app and client-graph schemas:
  - equal server/client graph versions: nothing to do (nil)
  - server newer than client graph: :download only when the app already
    matches the server version; otherwise the client must migrate/upgrade first (nil)
  - server older than client graph: :create-branch only when the app is
    also newer than the server; otherwise nil (branch is still being created)"
  [server-graph-schema app-schema client-graph-schema]
  (let [[server app client]
        (map db-schema/major-version [server-graph-schema app-schema client-graph-schema])]
    (cond
      ;; same schema on both sides: no migration work
      (= server client) nil
      ;; server ahead of this client-graph
      (> server client) (when (= server app) :download)
      ;; server behind this client-graph (server < client)
      :else (when (< server app) :create-branch))))

View File

@@ -1,606 +0,0 @@
(ns frontend.worker.rtc.client
"Fns about push local updates"
(:require [clojure.string :as string]
[datascript.core :as d]
[frontend.common.crypt :as crypt]
[frontend.common.missionary :as c.m]
[frontend.worker.flows :as worker-flows]
[frontend.worker.rtc.branch-graph :as r.branch-graph]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.const :as rtc-const]
[frontend.worker.rtc.exception :as r.ex]
[frontend.worker.rtc.gen-client-op :as gen-client-op]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.rtc.remote-update :as r.remote-update]
[frontend.worker.rtc.skeleton :as r.skeleton]
[frontend.worker.rtc.throttle :as r.throttle]
[frontend.worker.rtc.ws :as ws]
[frontend.worker.rtc.ws-util :as ws-util]
[lambdaisland.glogi :as log]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]
[missionary.core :as m]
[tick.core :as tick]))
(defn- task--apply-remote-updates-from-apply-ops
  "Return a task: consume an `apply-ops` server response.
  On a remote exception, log it, then: ignore :graph-lock-failed (safe to
  retry later), rethrow known exception types, or wrap unknown ones.
  On success, assert the remote t values are present and apply the remote
  update to the local db."
  [apply-ops-resp graph-uuid repo conn aes-key add-log-fn]
  (m/sp
    (if-let [remote-ex (:ex-data apply-ops-resp)]
      (do (add-log-fn :rtc.log/pull-remote-data (assoc remote-ex :sub-type :pull-remote-data-exception))
          (case (:type remote-ex)
            ;; another client holds the graph lock; nothing to do now
            :graph-lock-failed nil
            :graph-lock-missing
            (throw r.ex/ex-remote-graph-lock-missing)
            :rtc.exception/get-s3-object-failed
            (throw (ex-info (:ex-message apply-ops-resp) (:ex-data apply-ops-resp)))
            ;;else
            (throw (ex-info "Unavailable3" {:remote-ex remote-ex}))))
      (do (assert (and (pos? (:t apply-ops-resp)) (pos? (:t-query-end apply-ops-resp))) apply-ops-resp)
          (m/?
           (r.remote-update/task--apply-remote-update
            graph-uuid repo conn {:type :remote-update :value apply-ops-resp} aes-key add-log-fn))))))
(defn- new-task--init-request
  "Return a task: send `init-request` to the server and calibrate local state
  from the response (server schema version, remote-t bookkeeping, graph skeleton).
  The graph skeleton is only requested on first calibration or when more than
  500 t have passed since the last one.
  Throws a :missionary/retry-tagged exception when the remote graph is still
  being created, so `c.m/backoff` can retry."
  [get-ws-create-task graph-uuid major-schema-version repo conn *last-calibrate-t *server-schema-version add-log-fn]
  (m/sp
    (let [t-before (client-op/get-local-tx repo)
          get-graph-skeleton? (or (nil? @*last-calibrate-t)
                                  (< 500 (- t-before @*last-calibrate-t)))]
      (try
        (let [{_remote-t :t
               remote-t-query-end :t-query-end
               server-schema-version :server-schema-version
               server-builtin-db-idents :server-builtin-db-idents
               :as resp}
              (m/? (ws-util/send&recv get-ws-create-task
                                      {:action "init-request"
                                       :graph-uuid graph-uuid
                                       :schema-version (str major-schema-version)
                                       :api-version "20251124"
                                       :t-before t-before
                                       :get-graph-skeleton get-graph-skeleton?}
                                      :timeout-ms 30000))]
          (if-let [remote-ex (:ex-data resp)]
            (do
              (add-log-fn :rtc.log/init-request remote-ex)
              (case (:type remote-ex)
                ;; lock held by another client; caller may retry later
                :graph-lock-failed nil
                :graph-lock-missing (throw r.ex/ex-remote-graph-lock-missing)
                ;; else
                (throw (ex-info "Unavailable4" {:remote-ex remote-ex}))))
            (do
              (when server-schema-version
                (reset! *server-schema-version server-schema-version)
                (reset! *last-calibrate-t remote-t-query-end))
              (when remote-t-query-end
                (rtc-log-and-state/update-remote-t graph-uuid remote-t-query-end)
                ;; first sync of this graph: adopt the remote t as our local-tx
                (when (not t-before)
                  (client-op/update-local-tx repo remote-t-query-end)))
              (when (and server-schema-version server-builtin-db-idents)
                (r.skeleton/calibrate-graph-skeleton server-schema-version server-builtin-db-idents @conn))
              resp)))
        (catch :default e
          (if (= :rtc.exception/remote-graph-not-ready (:type (ex-data e)))
            ;; tag as retryable so the backoff wrapper retries while the graph is created
            (throw (ex-info "remote graph is still creating" {:missionary/retry true} e))
            (throw e)))))))
(def ^:private *register-graph-updates-sent
  "ws -> [bool, added-inst, [graph-uuid,major-schema-version,repo]]
  Memo of which websockets already had init-request sent; bool is the
  sent-flag, added-inst is used to expire stale entries."
  (atom {}))
(defn- clean-old-keys-in-sent!
  "Drop entries from `*register-graph-updates-sent` that were added more
  than 3 hours ago, so closed websockets don't accumulate forever."
  []
  (let [cutoff (tick/<< (tick/instant) (tick/new-duration 3 :hours))
        stale? (fn [[_ws [_sent? added-inst]]] (tick/< added-inst cutoff))
        stale-keys (map first (filter stale? @*register-graph-updates-sent))]
    (when (seq stale-keys)
      (swap! *register-graph-updates-sent #(apply dissoc % stale-keys)))))
(defn ensure-register-graph-updates--memoized
  "Return a task: get or create a mws(missionary wrapped websocket).
  see also `ws/get-mws-create`.
  But ensure `init-request` and `calibrate-graph-skeleton` has been sent"
  [get-ws-create-task graph-uuid major-schema-version repo conn
   *last-calibrate-t *online-users *server-schema-version *aes-key add-log-fn]
  (m/sp
    (let [ws (m/? get-ws-create-task)
          sent-3rd-value [graph-uuid major-schema-version repo]
          origin-v (@*register-graph-updates-sent ws)]
      ;; (re)initialize this ws's entry when it is new or targets a different graph
      (when (or (nil? origin-v)
                (not= (last origin-v) sent-3rd-value))
        (swap! *register-graph-updates-sent assoc ws [false (tick/instant) sent-3rd-value])
        (clean-old-keys-in-sent!))
      (when (not (first (@*register-graph-updates-sent ws)))
        ;; mark as sent up-front; rolled back in the catch below if init-request fails
        (swap! *register-graph-updates-sent assoc-in [ws 0] true)
        (let [recv-flow (ws/recv-flow (m/? get-ws-create-task))]
          ;; side task: capture the first "online-users-updated" message (10s timeout)
          (c.m/run-task :update-online-user-when-register-graph-updates
            (m/sp
              (when-let [online-users (:online-users
                                       (m/?
                                        (m/timeout
                                         (m/reduce
                                          (fn [_ v]
                                            (when (= "online-users-updated" (:req-id v))
                                              (reduced v)))
                                          recv-flow)
                                         10000)))]
                (reset! *online-users online-users)))
            :succ (constantly nil)))
        (let [{:keys [max-remote-schema-version] :as init-request-resp}
              (try
                (m/?
                 (c.m/backoff
                  {:delay-seq ;retry 5 times if remote-graph is creating (4000 8000 16000 32000 64000)
                   (take 5 (drop 2 c.m/delays))
                   :reset-flow worker-flows/online-event-flow}
                  (new-task--init-request
                   get-ws-create-task graph-uuid major-schema-version repo conn
                   *last-calibrate-t *server-schema-version
                   add-log-fn)))
                (catch :default e
                  ;; allow a later caller to retry init-request on this ws
                  (swap! *register-graph-updates-sent assoc-in [ws 0] false)
                  (throw e)))]
          ;; surface "a newer remote schema-version exists" to the log/UI
          (when max-remote-schema-version
            (add-log-fn :rtc.log/higher-remote-schema-version-exists
                        {:sub-type (r.branch-graph/compare-schemas
                                    max-remote-schema-version db-schema/version major-schema-version)
                         :repo repo
                         :graph-uuid graph-uuid
                         :remote-schema-version max-remote-schema-version}))
          (m/? (task--apply-remote-updates-from-apply-ops
                init-request-resp graph-uuid repo conn @*aes-key add-log-fn))))
      ws)))
(defn- ->pos
  "Build the position tuple [parent-uuid order] carried by remote move/add ops."
  [parent-uuid order]
  (vector parent-uuid order))
;; Dispatch on the local-op tag (:move-op, :add-op, :update-op, :update-page-op,
;; :remove-op, :remove-page-op); each method appends remote ops to *remote-ops.
(defmulti ^:private local-block-ops->remote-ops-aux (fn [tp & _] tp))
(defmethod local-block-ops->remote-ops-aux :move-op
  [_ & {:keys [parent-uuid block-order block-uuid *remote-ops *depend-on-block-uuid-set]}]
  ;; A move is fully described by the block and its new position.
  (swap! *remote-ops conj
         [:move {:block-uuid block-uuid
                 :pos (->pos parent-uuid block-order)}])
  ;; The parent must exist remotely before this move can be applied.
  (when parent-uuid
    (swap! *depend-on-block-uuid-set conj parent-uuid)))
(defn- card-many-attr?
  "True when `attr` is declared :db.cardinality/many in `db`'s schema."
  [db attr]
  (let [cardinality (get-in (d/schema db) [attr :db/cardinality])]
    (= :db.cardinality/many cardinality)))
(defn- remove-redundant-av
  "Remove previous av if later-av has same [a v] or a.
  Card-many attrs are keyed by [a v]; card-one attrs by a alone.
  On a key collision keep the av with the larger t; on equal t prefer
  the one whose add? flag is exactly true."
  [db av-coll]
  (letfn [(pick [old-av new-av]
            (let [old-t (nth old-av 2)
                  new-t (nth new-av 2)]
              (cond
                (< old-t new-t) new-av
                (> old-t new-t) old-av
                (true? (nth new-av 3)) new-av
                :else old-av)))]
    (vals
     (reduce
      (fn [acc [a v :as av]]
        (let [k (if (card-many-attr? db a) [a v] a)]
          (if-let [old-av (get acc k)]
            (assoc acc k (pick old-av av))
            (assoc acc k av))))
      {} av-coll))))
(defn- remove-non-exist-ref-av
  "Remove av if its v is ref(block-uuid) and not exist"
  [db av-coll]
  (filter
   (fn [[_a v _t add?]]
     ;; retractions (add?=false) are kept even when the referenced block is gone
     (not (and add?
               (uuid? v)
               (nil? (d/entity db [:block/uuid v])))))
   av-coll))
(defn- group-by-schema-attrs
  "Split av-coll into [schema-av-coll other-av-coll]; schema avs are those
  whose attr is one of :db/valueType, :db/cardinality, :db/index.
  Either element is nil when its group is empty (group-by semantics)."
  [av-coll]
  (let [schema-attr? (comp boolean #{:db/valueType :db/cardinality :db/index} first)
        grouped (group-by schema-attr? av-coll)]
    [(get grouped true) (get grouped false)]))
(defn- schema-av-coll->update-schema-op
  "Build an [:update-schema …] op from schema avs, but only when the entity
  at `db-ident` exists and is a property. :db/valueType defaults to
  :db.type/string; cardinality and index are included only when set."
  [db block-uuid db-ident schema-av-coll]
  (when (and (seq schema-av-coll) db-ident)
    (when-let [ent (d/entity db db-ident)]
      (when (ldb/property? ent)
        (let [base {:block-uuid block-uuid
                    :db/ident db-ident
                    :db/valueType (or (:db/valueType ent) :db.type/string)}
              card (:db/cardinality ent)
              idx (:db/index ent)]
          [:update-schema
           (cond-> base
             card (assoc :db/cardinality card)
             idx (assoc :db/index idx))])))))
(defn- av-coll->card-one-attrs
  "Return the distinct property attrs in av-coll that are cardinality-one.
  A property attr is one namespaced under \"logseq.property*\" or whose
  namespace ends in \".property\"."
  [db-schema av-coll]
  (->> av-coll
       (map first)
       distinct
       (filter
        (fn [a]
          (when-let [ns' (namespace a)]
            (and (or (string/starts-with? ns' "logseq.property")
                     (string/ends-with? ns' ".property"))
                 (= :db.cardinality/one (:db/cardinality (db-schema a)))))))))
(defmethod local-block-ops->remote-ops-aux :add-op
  ;; Emit a remote :add op (plus an optional :update-schema op) for a newly
  ;; added block, and record which other blocks it depends on (ref values
  ;; and its parent).
  [_ & {:keys [db block add-op parent-uuid block-order *remote-ops *depend-on-block-uuid-set]}]
  (let [block-uuid (:block/uuid block)
        pos (->pos parent-uuid block-order)
        ;; keep only the latest av per attr and drop refs to deleted blocks
        av-coll (->> (:av-coll (last add-op))
                     (remove-redundant-av db)
                     (remove-non-exist-ref-av db))
        [schema-av-coll other-av-coll] (group-by-schema-attrs av-coll)
        update-schema-op (schema-av-coll->update-schema-op db block-uuid (:db/ident block) schema-av-coll)
        ;; uuid-valued avs are refs: those blocks must exist remotely first
        depend-on-block-uuids (keep (fn [[_a v]] (when (uuid? v) v)) other-av-coll)
        card-one-attrs (seq (av-coll->card-one-attrs (d/schema db) other-av-coll))]
    (when (seq other-av-coll)
      (swap! *remote-ops conj
             [:add (cond-> {:block-uuid block-uuid
                            :pos pos
                            :av-coll other-av-coll}
                     (:db/ident block) (assoc :db/ident (:db/ident block))
                     card-one-attrs (assoc :card-one-attrs card-one-attrs))]))
    (when update-schema-op
      (swap! *remote-ops conj update-schema-op))
    (swap! *depend-on-block-uuid-set (partial apply conj) depend-on-block-uuids)
    (when parent-uuid
      (swap! *depend-on-block-uuid-set conj parent-uuid))))
(defmethod local-block-ops->remote-ops-aux :update-op
  ;; Same shape as :add-op but emits a remote :update op; unlike :add-op
  ;; it does not add the parent to the dependency set.
  [_ & {:keys [db block update-op block-order parent-uuid *remote-ops *depend-on-block-uuid-set]}]
  (let [block-uuid (:block/uuid block)
        pos (->pos parent-uuid block-order)
        ;; keep only the latest av per attr and drop refs to deleted blocks
        av-coll (->> (:av-coll (last update-op))
                     (remove-redundant-av db)
                     (remove-non-exist-ref-av db))
        [schema-av-coll other-av-coll] (group-by-schema-attrs av-coll)
        update-schema-op (schema-av-coll->update-schema-op db block-uuid (:db/ident block) schema-av-coll)
        depend-on-block-uuids (keep (fn [[_a v]] (when (uuid? v) v)) other-av-coll)
        card-one-attrs (seq (av-coll->card-one-attrs (d/schema db) other-av-coll))]
    (when (seq other-av-coll)
      (swap! *remote-ops conj
             [:update (cond-> {:block-uuid block-uuid
                               :pos pos
                               :av-coll other-av-coll}
                        (:db/ident block) (assoc :db/ident (:db/ident block))
                        card-one-attrs (assoc :card-one-attrs card-one-attrs))]))
    (when update-schema-op
      (swap! *remote-ops conj update-schema-op))
    (swap! *depend-on-block-uuid-set (partial apply conj) depend-on-block-uuids)))
(defmethod local-block-ops->remote-ops-aux :update-page-op
  [_ & {:keys [db block-uuid *remote-ops]}]
  ;; Only emit when the page entity still exists locally.
  (when-let [ent (d/entity db [:block/uuid block-uuid])]
    (let [page-name (:block/name ent)
          title (:block/title ent)
          db-ident (:db/ident ent)]
      (swap! *remote-ops conj
             [:update-page (cond-> {:block-uuid block-uuid
                                    :page-name (ldb/write-transit-str page-name)
                                    ;; titleless pages fall back to the page-name
                                    :block/title (ldb/write-transit-str (or title page-name))}
                             db-ident (assoc :db/ident db-ident))]))))
(defmethod local-block-ops->remote-ops-aux :remove-op
  [_ & {:keys [db remove-op *remote-ops]}]
  (let [block-uuid (:block-uuid (last remove-op))]
    ;; Only emit a remote remove when the block is truly gone locally.
    (when (and block-uuid
               (nil? (d/entity db [:block/uuid block-uuid])))
      (swap! *remote-ops conj [:remove {:block-uuids [block-uuid]}]))))
(defmethod local-block-ops->remote-ops-aux :remove-page-op
  [_ & {:keys [db remove-page-op *remote-ops]}]
  (let [block-uuid (:block-uuid (last remove-page-op))]
    ;; Only emit a remote remove-page when the page is truly gone locally.
    (when (and block-uuid
               (nil? (d/entity db [:block/uuid block-uuid])))
      (swap! *remote-ops conj [:remove-page {:block-uuid block-uuid}]))))
(defn- local-block-ops->remote-ops
  "Convert one block's local op map (keys :move :remove :update :add
  :update-page :remove-page) into remote ops.
  Returns {:remote-ops {op-type op-value …}
           :depend-on-block-uuids #{uuid …}} where the dependency set lists
  blocks that must exist remotely before these ops can apply."
  [db block-ops-map]
  (let [*depend-on-block-uuid-set (atom #{})
        *remote-ops (atom [])
        {move-op :move remove-op :remove update-op :update add-op :add update-page-op :update-page remove-page-op :remove-page}
        block-ops-map]
    ;; Ops other than remove/remove-page require the block to still exist locally.
    (when-let [block-uuid (some (comp :block-uuid last) [move-op update-op add-op update-page-op])]
      (when-let [block (d/entity db [:block/uuid block-uuid])]
        (let [parent-uuid (some-> block :block/parent :block/uuid)]
          ;; remote-move-op
          (when move-op
            (local-block-ops->remote-ops-aux :move-op
                                             :parent-uuid parent-uuid
                                             :block-order (:block/order block)
                                             :block-uuid block-uuid
                                             :*remote-ops *remote-ops
                                             :*depend-on-block-uuid-set *depend-on-block-uuid-set))
          ;; remote-add-op
          (when add-op
            (local-block-ops->remote-ops-aux :add-op
                                             :db db
                                             :block block
                                             :add-op add-op
                                             :parent-uuid parent-uuid
                                             :block-order (:block/order block)
                                             :*remote-ops *remote-ops
                                             :*depend-on-block-uuid-set *depend-on-block-uuid-set))
          ;; remote-update-op
          (when update-op
            (local-block-ops->remote-ops-aux :update-op
                                             :db db
                                             :block block
                                             :update-op update-op
                                             :parent-uuid parent-uuid
                                             :block-order (:block/order block)
                                             :*remote-ops *remote-ops
                                             :*depend-on-block-uuid-set *depend-on-block-uuid-set)))
        ;; remote-update-page-op
        (when update-page-op
          (local-block-ops->remote-ops-aux :update-page-op
                                           :db db
                                           :block-uuid block-uuid
                                           :*remote-ops *remote-ops))))
    ;; remote-remove-op
    (when remove-op
      (local-block-ops->remote-ops-aux :remove-op
                                       :db db
                                       :remove-op remove-op
                                       :*remote-ops *remote-ops))
    ;; remote-remove-page-op
    (when remove-page-op
      (local-block-ops->remote-ops-aux :remove-page-op
                                       :db db
                                       :remove-page-op remove-page-op
                                       :*remote-ops *remote-ops))
    ;; NOTE: collapsing the op vector into a map keeps at most one op per op-type
    {:remote-ops (into {} @*remote-ops)
     :depend-on-block-uuids @*depend-on-block-uuid-set}))
(defn- gen-block-uuid->remote-ops
  "Map each block-uuid in block-ops-map-coll to its generated remote ops."
  [db block-ops-map-coll]
  (reduce
   (fn [acc block-ops-map]
     (assoc acc
            (:block/uuid block-ops-map)
            (:remote-ops (local-block-ops->remote-ops db block-ops-map))))
   {} block-ops-map-coll))
(defn- local-update-kv-value-ops->remote-ops
  "Turn :update-kv-value entries of an op-map into remote ops, serializing
  the value as a transit string."
  [update-kv-value-ops-map]
  (for [[op-type op] update-kv-value-ops-map
        :when (= :update-kv-value op-type)
        :let [{:keys [db-ident value]} (last op)]]
    [:update-kv-value {:db-ident db-ident :value (ldb/write-transit-str value)}]))
(defn- gen-update-kv-value-remote-ops
  "Flatten all :update-kv-value remote ops across a coll of op-maps."
  [update-kv-value-ops-map-coll]
  (apply concat (map local-update-kv-value-ops->remote-ops update-kv-value-ops-map-coll)))
(defn- local-rename-db-ident-ops->remote-ops
  "Turn :rename-db-ident entries of an op-map into remote ops, keeping only
  the keys the server needs."
  [rename-db-ident-ops-map]
  (for [[op-type op] rename-db-ident-ops-map
        :when (keyword-identical? :rename-db-ident op-type)]
    [:rename-db-ident (select-keys (last op) [:db-ident-or-block-uuid :new-db-ident])]))
(defn- gen-rename-db-ident-remote-ops
  "Flatten all :rename-db-ident remote ops across a coll of op-maps."
  [rename-db-ident-ops-map-coll]
  (apply concat (map local-rename-db-ident-ops->remote-ops rename-db-ident-ops-map-coll)))
(defn- merge-remove-remove-ops
  "Collapse many [:remove {:block-uuids […]}] ops into a single one with the
  distinct union of uuids; nil when there is nothing to remove."
  [remote-remove-ops]
  (let [uuids (distinct (mapcat (comp :block-uuids second) remote-remove-ops))]
    (when (seq uuids)
      [[:remove {:block-uuids uuids}]])))
(defn- sort-remote-ops
  "Order remote ops so the server can apply them:
  adds, then update-schemas, then update-pages, then the merged remove,
  then moves sorted so a move's dependency precedes it, then updates,
  then remove-pages."
  [block-uuid->remote-ops]
  (let [block-uuid->dep-uuid
        (into {}
              (keep (fn [[block-uuid remote-ops]]
                      (when-let [move-op (get remote-ops :move)]
                        ;; NOTE(review): :move op-values are built with :block-uuid/:pos
                        ;; (see the :move-op method), not :target-uuid — this looks like
                        ;; it is always nil, making the dependency sort a no-op; confirm.
                        [block-uuid (:target-uuid move-op)])))
              block-uuid->remote-ops)
        all-move-uuids (set (keys block-uuid->dep-uuid))
        ;; TODO: use `sort-coll-by-dependency`
        ;; Emit each move after the move it depends on.
        ;; NOTE(review): a dependency cycle among moves would make this loop
        ;; spin forever; presumably moves are acyclic — confirm.
        sorted-uuids
        (loop [r []
               rest-uuids all-move-uuids
               uuid (first rest-uuids)]
          (if-not uuid
            r
            (let [dep-uuid (block-uuid->dep-uuid uuid)]
              (if-let [next-uuid (get rest-uuids dep-uuid)]
                ;; dependency still pending: handle it first
                (recur r rest-uuids next-uuid)
                (let [rest-uuids* (disj rest-uuids uuid)]
                  (recur (conj r uuid) rest-uuids* (first rest-uuids*)))))))
        sorted-move-ops (keep
                         (fn [block-uuid]
                           (some->> (get-in block-uuid->remote-ops [block-uuid :move])
                                    (vector :move)))
                         sorted-uuids)
        add-ops (keep
                 (fn [[_ remote-ops]]
                   (some->> (:add remote-ops) (vector :add)))
                 block-uuid->remote-ops)
        update-schema-ops (keep
                           (fn [[_ remote-ops]]
                             (some->> (:update-schema remote-ops) (vector :update-schema)))
                           block-uuid->remote-ops)
        remove-ops (merge-remove-remove-ops
                    (keep
                     (fn [[_ remote-ops]]
                       (some->> (:remove remote-ops) (vector :remove)))
                     block-uuid->remote-ops))
        update-ops (keep
                    (fn [[_ remote-ops]]
                      (some->> (:update remote-ops) (vector :update)))
                    block-uuid->remote-ops)
        update-page-ops (keep
                         (fn [[_ remote-ops]]
                           (some->> (:update-page remote-ops) (vector :update-page)))
                         block-uuid->remote-ops)
        remove-page-ops (keep
                         (fn [[_ remote-ops]]
                           (some->> (:remove-page remote-ops) (vector :remove-page)))
                         block-uuid->remote-ops)]
    (concat add-ops update-schema-ops update-page-ops remove-ops sorted-move-ops update-ops remove-page-ops)))
(defn- rollback
  "Put the three families of locally-popped ops back into the client-ops
  store, so a failed push can be retried later. Returns nil.
  (The three extraction passes were copy-pasted; deduplicated into one
  local helper parameterized by a key predicate.)"
  [repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll]
  (let [ops-matching
        ;; collect the op values in each op-map whose key satisfies key-pred
        (fn [key-pred ops-map-coll]
          (mapcat
           (fn [m]
             (keep (fn [[k op]]
                     (when (key-pred k)
                       op))
                   m))
           ops-map-coll))]
    ;; block op-maps carry the ops under every key except :block/uuid
    (client-op/add-ops! repo (ops-matching #(not (keyword-identical? :block/uuid %))
                                           block-ops-map-coll))
    (client-op/add-ops! repo (ops-matching #(keyword-identical? :update-kv-value %)
                                           update-kv-value-ops-map-coll))
    (client-op/add-ops! repo (ops-matching #(keyword-identical? :rename-db-ident %)
                                           rename-db-ident-ops-map-coll))
    nil))
(defn- task--encrypt-remote-ops
  "Return a task: encrypt sensitive attrs in remote ops before upload.
  :update-page values are encrypted as a map (including :page-name);
  :update and :add ops get their :av-coll encrypted attr-by-attr;
  all other ops pass through unchanged.
  (The :update and :add branches were identical; merged into one case branch.)"
  [aes-key remote-ops]
  (assert aes-key)
  (let [encrypt-attr-set (conj rtc-const/encrypt-attr-set :page-name)]
    (m/sp
      (loop [[remote-op & rest-remote-ops] remote-ops
             result []]
        (if-not remote-op
          result
          (let [[op-type op-value] remote-op]
            (case op-type
              :update-page
              (recur rest-remote-ops
                     (conj result
                           [op-type (c.m/<? (crypt/<encrypt-map aes-key encrypt-attr-set op-value))]))
              (:update :add)
              ;; both op types carry an :av-coll needing attr-level encryption
              (let [av-coll* (c.m/<?
                              (crypt/<encrypt-av-coll
                               aes-key rtc-const/encrypt-attr-set (:av-coll op-value)))]
                (recur rest-remote-ops
                       (conj result [op-type (assoc op-value :av-coll av-coll*)])))
              ;; else
              (recur rest-remote-ops (conj result remote-op)))))))))
(defn new-task--push-local-ops
  "Return a task: push local updates.
  Pops all pending local ops (destructive), converts them to remote ops,
  optionally encrypts them, and sends one `apply-ops` request. On any
  failure path the popped ops are rolled back into the store so they can
  be retried. On success the remote update in the response is applied
  locally and the new remote t is logged."
  [repo conn graph-uuid major-schema-version get-ws-create-task *remote-profile? aes-key add-log-fn]
  (m/sp
    (let [block-ops-map-coll (client-op/get&remove-all-block-ops repo)
          update-kv-value-ops-map-coll (client-op/get&remove-all-update-kv-value-ops repo)
          rename-db-ident-ops-map-coll (client-op/get&remove-all-rename-db-ident-ops repo)
          block-uuid->remote-ops (not-empty (gen-block-uuid->remote-ops @conn block-ops-map-coll))
          rename-db-ident-remote-ops (gen-rename-db-ident-remote-ops rename-db-ident-ops-map-coll)
          other-remote-ops (gen-update-kv-value-remote-ops update-kv-value-ops-map-coll)
          remote-ops (concat
                      rename-db-ident-remote-ops
                      (when block-uuid->remote-ops (sort-remote-ops block-uuid->remote-ops))
                      other-remote-ops)]
      (when-let [ops-for-remote (rtc-api-schema/to-ws-ops-decoder remote-ops)]
        (let [local-tx (client-op/get-local-tx repo)
              ;; e2ee graphs encrypt op payloads before sending
              ops-for-remote* (if aes-key
                                (m/? (task--encrypt-remote-ops aes-key ops-for-remote))
                                ops-for-remote)
              r (try
                  (let [message (cond-> {:action "apply-ops"
                                         :graph-uuid graph-uuid
                                         :schema-version (str major-schema-version)
                                         :api-version "20251124"
                                         :ops ops-for-remote*
                                         :t-before local-tx}
                                  (true? @*remote-profile?) (assoc :profile true))
                        r (m/? (ws-util/send&recv get-ws-create-task message :timeout-ms 30000))]
                    (r.throttle/add-rtc-api-call-record! message)
                    r)
                  (catch :default e
                    ;; network/ws failure: restore popped ops before rethrowing
                    (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                    (throw e)))]
          (if-let [remote-ex (:ex-data r)]
            (do (add-log-fn :rtc.log/push-local-update remote-ex)
                (case (:type remote-ex)
                  ;; - :graph-lock-failed
                  ;;   conflict-update remote-graph, keep these local-pending-ops
                  ;;   and try to send ops later
                  :graph-lock-failed
                  (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                  ;; - :graph-lock-missing
                  ;;   this case means something wrong in remote-graph data,
                  ;;   nothing to do at client-side
                  :graph-lock-missing
                  (do (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                      (throw r.ex/ex-remote-graph-lock-missing))
                  :rtc.exception/get-s3-object-failed
                  (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                  ;; else
                  (do (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                      (throw (ex-info "Unavailable1" {:remote-ex remote-ex})))))
            (if-let [not-found-target-ops (seq (:not-found-target-ops r))]
              (do (rollback repo block-ops-map-coll update-kv-value-ops-map-coll rename-db-ident-ops-map-coll)
                  ;; add more ents into ops for remote
                  (let [ents (keep
                              (fn [op]
                                (let [block-uuid (:block-uuid (second op))]
                                  (assert block-uuid)
                                  (d/entity @conn [:block/uuid block-uuid])))
                              not-found-target-ops)
                        extra-ops (gen-client-op/generate-rtc-ops-from-entities+parents ents)]
                    (log/error :not-found-target-ops not-found-target-ops
                               :extra-ops extra-ops)
                    (client-op/add-ops! repo extra-ops)))
              (do (assert (and (pos? (:t r)) (pos? (:t-query-end r))) r)
                  (m/?
                   (r.remote-update/task--apply-remote-update
                    graph-uuid repo conn {:type :remote-update :value r} aes-key add-log-fn))
                  (add-log-fn :rtc.log/push-local-update {:remote-t (:t r) :remote-t-query-end (:t-query-end r)})))))))))
(defn new-task--pull-remote-data
  "Return a task: fetch remote updates since local-tx by sending an
  `apply-ops` request with an empty ops list, then apply the response
  to the local db."
  [repo conn graph-uuid major-schema-version get-ws-create-task aes-key add-log-fn]
  (m/sp
    (let [local-tx (client-op/get-local-tx repo)
          message {:action "apply-ops"
                   :graph-uuid graph-uuid
                   :schema-version (str major-schema-version)
                   :api-version "20251124"
                   :ops []
                   :t-before local-tx}
          r (m/? (ws-util/send&recv get-ws-create-task message :timeout-ms 30000))]
      (r.throttle/add-rtc-api-call-record! message)
      (m/? (task--apply-remote-updates-from-apply-ops r graph-uuid repo conn aes-key add-log-fn)))))

View File

@@ -1,591 +0,0 @@
(ns frontend.worker.rtc.client-op
"Store client-ops in a persisted datascript"
(:require [datascript.core :as d]
[frontend.common.missionary :as c.m]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[logseq.db :as ldb]
[malli.core :as ma]
[malli.transform :as mt]
[missionary.core :as m]))
(def op-schema
  "Malli schema for a single client op, dispatched on the op-type keyword.
  Every op is [op-type t value]. (The six uuid-only and two av-coll op
  shapes were repeated verbatim; factored into shared subschema values —
  the resulting schema data is identical.)"
  (let [block-uuid-op [:catn
                       [:op :keyword]
                       [:t :int]
                       [:value [:map
                                [:block-uuid :uuid]]]]
        av-op [:catn
               [:op :keyword]
               [:t :int]
               [:value [:map
                        [:block-uuid :uuid]
                        [:av-coll [:sequential rtc-api-schema/av-schema]]]]]]
    [:multi {:dispatch first}
     [:update-kv-value
      ;; update :logseq.kv/xxx entities
      [:catn
       [:op :keyword]
       [:t :int]
       [:value [:map
                [:db-ident :keyword]
                [:value :any]]]]]
     [:rename-db-ident
      [:catn
       [:op :keyword]
       [:t :int]
       [:value [:map
                [:db-ident-or-block-uuid [:or :keyword :uuid]]
                [:new-db-ident :keyword]]]]]
     [:move block-uuid-op]
     [:remove block-uuid-op]
     [:update-page block-uuid-op]
     [:remove-page block-uuid-op]
     [:update av-op]
     [:add av-op]
     [:update-asset block-uuid-op]
     [:remove-asset block-uuid-op]]))
;; Schema for a batch of client ops.
(def ops-schema [:sequential op-schema])
;; Coerce (possibly JSON-shaped) ops into schema form; invalid ops are
;; logged under ::bad-ops and rejected via ma/-fail!.
(def ops-coercer (ma/coercer ops-schema mt/json-transformer nil
                             #(do (log/error ::bad-ops (:value %))
                                  (ma/-fail! ::ops-schema (select-keys % [:value])))))
;; Partitions of op-type keywords: these double as attribute names in the
;; client-ops datascript db, one attribute per op-type.
(def ^:private block-op-types #{:move :remove :update-page :remove-page :update :add})
(def ^:private asset-op-types #{:update-asset :remove-asset})
(def ^:private update-kv-value-op-types #{:update-kv-value})
(def ^:private db-ident-rename-op-types #{:rename-db-ident})
(def schema-in-db
  "Datascript schema of the client-ops store (ops + sync metadata).
  TODO: rename this db-name from client-op to client-metadata+op.
  and move it to its own namespace."
  {:block/uuid {:db/unique :db.unique/identity}
   :db-ident {:db/unique :db.unique/identity}
   :db-ident-or-block-uuid {:db/unique :db.unique/identity}
   ;; local-tx is the latest remote-tx that local db persists
   :local-tx {:db/index true}
   :graph-uuid {:db/index true}
   :db-sync/checksum {:db/index true}
   :db-sync/tx-id {:db/unique :db.unique/identity}
   :db-sync/created-at {:db/index true}
   :db-sync/tx-data {}
   :db-sync/normalized-tx-data {}
   :db-sync/reversed-tx-data {}})
(defn update-graph-uuid
  "Record `graph-uuid` in the client-ops store for `repo`; no-op when the
  store's conn does not exist."
  [repo graph-uuid]
  {:pre [(some? graph-uuid)]}
  (some-> (worker-state/get-client-ops-conn repo)
          (ldb/transact! [[:db/add "e" :graph-uuid graph-uuid]])))
(defn get-graph-uuid
  "Read the stored graph-uuid for `repo`; nil when absent or no conn."
  [repo]
  (some-> (worker-state/get-client-ops-conn repo)
          deref
          (d/datoms :avet :graph-uuid)
          first
          :v))
(defn update-local-tx
  "Persist `t` as local-tx (latest remote-tx applied locally).
  Reuses the entity already holding :local-tx, else the one holding
  :db-sync/checksum, else creates a fresh entity."
  [repo t]
  {:pre [(some? t)]}
  (let [conn (worker-state/get-client-ops-conn repo)]
    (assert (some? conn) repo)
    (let [eid (or (:e (first (d/datoms @conn :avet :local-tx)))
                  (:e (first (d/datoms @conn :avet :db-sync/checksum)))
                  "e")]
      (ldb/transact! conn [[:db/add eid :local-tx t]]))))
;; Unused draft kept for reference: persist a db-sync checksum next to local-tx.
(comment
  (defn update-local-checksum
    [repo checksum]
    {:pre [(some? checksum)]}
    (let [conn (worker-state/get-client-ops-conn repo)]
      (assert (some? conn) repo)
      (let [tx-data
            (if-let [datom (first (d/datoms @conn :avet :db-sync/checksum))]
              [:db/add (:e datom) :db-sync/checksum checksum]
              (if-let [datom (first (d/datoms @conn :avet :local-tx))]
                [:db/add (:e datom) :db-sync/checksum checksum]
                [:db/add "e" :db-sync/checksum checksum]))]
        (ldb/transact! conn [tx-data])))))
(defn remove-local-tx
  "Retract local-tx (and the paired db-sync checksum) for `repo`; no-op
  when the conn is missing or no local-tx is stored."
  [repo]
  (let [conn (worker-state/get-client-ops-conn repo)
        datom (when conn (first (d/datoms @conn :avet :local-tx)))]
    (when datom
      (ldb/transact! conn [[:db/retract (:e datom) :local-tx]
                           [:db/retract (:e datom) :db-sync/checksum]]))))
(defn get-local-tx
  "Read the persisted local-tx for `repo`; may be nil when the graph has
  never synced."
  [repo]
  (let [conn (worker-state/get-client-ops-conn repo)]
    (assert (some? conn) repo)
    (some-> (d/datoms @conn :avet :local-tx) first :v)))
;; Unused draft kept for reference: read the persisted db-sync checksum.
(comment
  (defn get-local-checksum
    [repo]
    (let [conn (worker-state/get-client-ops-conn repo)]
      (assert (some? conn) repo)
      (:v (first (d/datoms @conn :avet :db-sync/checksum))))))
(defn- merge-update-ops
  "Merge two ops on the same block into one, concatenating av-colls in
  t-order and keeping the later t. Result type is :add when either input
  is an :add, else :update."
  [op1 op2]
  {:pre [(contains? #{:add :update} (first op1))
         (= :update (first op2))
         (= (:block-uuid (last op1))
            (:block-uuid (last op2)))]}
  (if (> (second op1) (second op2))
    ;; normalize so the earlier op comes first
    (merge-update-ops op2 op1)
    (let [v1 (last op1)
          v2 (last op2)
          merged-type (if (or (= :add (first op1)) (= :add (first op2)))
                        :add
                        :update)]
      [merged-type (second op2)
       {:block-uuid (:block-uuid v1)
        :av-coll (concat (:av-coll v1) (:av-coll v2))}])))
(defn- merge-block-ops
  "Carefully compare t among ops.
  Return merged block-op-map.
  An op newer than a remove cancels the remove (marked :retract); a remove
  newer than add/move/update cancels those; add+update merge via
  `merge-update-ops`; page ops follow the same newer-wins rule.
  NOTE(review): the (>= some-op-t op-t) comparisons run with nil t when the
  corresponding op is absent — appears to rely on CLJS/JS coercing nil to 0
  in numeric comparison; confirm this is intended."
  [current-block-op-map op-to-add]
  (let [[op-type op-t _value] op-to-add
        ;; destructure currently-stored ops, ignoring already-retracted slots
        {[_ remove-op-t _remove-op-value :as remove-op] :remove
         [_ move-op-t _move-op-value :as move-op] :move
         [_ update-op-t _update-op-value :as update-op] :update
         [_ update-page-op-t _update-page-op-value :as update-page-op] :update-page
         [_ remove-page-op-t _remove-page-op-value :as remove-page-op] :remove-page
         [_ add-op-t _add-op-value :as add-op] :add}
        (into {} (filter (fn [[_op-type op]] (some-> op (not= :retract))) current-block-op-map))]
    (case op-type
      :add
      (if (>= remove-op-t op-t) current-block-op-map
          (cond-> (assoc current-block-op-map :remove :retract)
            (or (nil? add-op) (> op-t add-op-t)) (assoc :add op-to-add)))
      :move
      (if (>= remove-op-t op-t) current-block-op-map
          (if add-op
            ;; a pending add subsumes the move: just bump the add's t
            (let [[_ add-t add-val] add-op
                  new-t (max add-t op-t)]
              (assoc current-block-op-map :add [:add new-t add-val]))
            (cond-> (assoc current-block-op-map :remove :retract)
              (or (nil? move-op) (> op-t move-op-t)) (assoc :move op-to-add))))
      :update
      (if (>= remove-op-t op-t) current-block-op-map
          (if add-op
            ;; fold the update's avs into the pending add
            (assoc current-block-op-map :add (merge-update-ops add-op op-to-add))
            (assoc current-block-op-map
                   :remove :retract
                   :update (if update-op (merge-update-ops update-op op-to-add) op-to-add))))
      :remove
      (if (or (>= move-op-t op-t) (>= update-op-t op-t) (and add-op (>= add-op-t op-t)))
        current-block-op-map
        (cond-> (assoc current-block-op-map :move :retract :update :retract :add :retract)
          (or (nil? remove-op) (> op-t remove-op-t)) (assoc :remove op-to-add)))
      :update-page
      (if (>= remove-page-op-t op-t) current-block-op-map
          (cond-> (assoc current-block-op-map :remove-page :retract)
            (or (nil? update-page-op) (> op-t update-page-op-t)) (assoc :update-page op-to-add)))
      :remove-page
      (if (>= update-page-op-t op-t) current-block-op-map
          (cond-> (assoc current-block-op-map :update-page :retract)
            (or (nil? remove-page-op) (> op-t remove-page-op-t)) (assoc :remove-page op-to-add))))))
(defn- generate-block-ops-tx-data
  "Fold `ops` (sorted by t) into the per-block op maps already stored in
  `client-ops-db` (via `merge-block-ops`) and emit the tx-data to persist
  the merged result; :retract slots become retractAttribute forms."
  [client-ops-db ops]
  (let [sorted-ops (sort-by second ops)
        block-uuids (map (fn [[_op-type _t value]] (:block-uuid value)) sorted-ops)
        ents (d/pull-many client-ops-db '[*] (map (fn [block-uuid] [:block/uuid block-uuid]) block-uuids))
        op-types [:add :move :update :remove :update-page :remove-page]
        ;; seed with the op maps already persisted for these blocks
        init-block-uuid->op-type->op
        (into {}
              (map (fn [ent]
                     [(:block/uuid ent)
                      (into {}
                            (keep
                             (fn [op-type]
                               (when-let [op (get ent op-type)]
                                 [op-type op])))
                            op-types)]))
              ents)
        ;; merge each incoming op, in t-order, into its block's op map
        block-uuid->op-type->op
        (reduce
         (fn [r op]
           (let [[_ _ value] op
                 block-uuid (:block-uuid value)
                 current-block-op-map (get r block-uuid)]
             (assoc r block-uuid (merge-block-ops current-block-op-map op))))
         init-block-uuid->op-type->op sorted-ops)]
    (mapcat
     (fn [[block-uuid op-type->op]]
       (let [tmpid (str block-uuid)]
         (when-let [tx-data
                    (not-empty
                     (keep
                      (fn [[op-type op]]
                        (cond
                          (= :retract op)
                          [:db.fn/retractAttribute [:block/uuid block-uuid] op-type]
                          (some? op)
                          [:db/add tmpid op-type op]))
                      op-type->op))]
           (cons [:db/add tmpid :block/uuid block-uuid] tx-data))))
     block-uuid->op-type->op)))
(defn- generate-ident-kv-ops-tx-data
  "Like `generate-block-ops-tx-data` but for :update-kv-value ops keyed by
  :db-ident; later ops (t-order) simply overwrite earlier ones per ident."
  [client-ops-db ops]
  (let [sorted-ops (sort-by second ops)
        db-idents (map (fn [[_op-type _t value]] (:db-ident value)) sorted-ops)
        ents (d/pull-many client-ops-db '[*] (map (fn [db-ident] [:db-ident db-ident]) db-idents))
        op-types [:update-kv-value]
        ;; seed with ops already persisted for these idents
        init-db-ident->op-type->op
        (into {}
              (map (fn [ent]
                     [(:db-ident ent)
                      (into {}
                            (keep
                             (fn [op-type]
                               (when-let [op (get ent op-type)]
                                 [op-type op])))
                            op-types)]))
              ents)
        ;; last write wins per db-ident (ops are processed in t-order)
        db-ident->op-type->op
        (reduce
         (fn [r op]
           (let [[op-type _t value] op
                 db-ident (:db-ident value)]
             (case op-type
               :update-kv-value
               (assoc-in r [db-ident :update-kv-value] op))))
         init-db-ident->op-type->op sorted-ops)]
    (mapcat
     (fn [[db-ident op-type->op]]
       (let [tmpid (str db-ident)]
         (when-let [tx-data (not-empty
                             (keep
                              (fn [[op-type op]]
                                (when op [:db/add tmpid op-type op]))
                              op-type->op))]
           (cons [:db/add tmpid :db-ident db-ident] tx-data))))
     db-ident->op-type->op)))
(defn generate-rename-db-ident-ops-tx-data
  "Emit tx-data persisting :rename-db-ident ops, one entity per rename
  target; when several ops share a target the last one in `ops` wins."
  [ops]
  (let [latest-op-by-target
        (reduce
         (fn [acc [_op-type _t value :as op]]
           (assoc acc (:db-ident-or-block-uuid value) op))
         {} ops)]
    (mapcat
     (fn [[target op]]
       (let [tmpid (str target "-rename-db-ident")]
         [[:db/add tmpid :db-ident-or-block-uuid target]
          [:db/add tmpid :rename-db-ident op]]))
     latest-op-by-target)))
(defn- partition-ops
  "Return [:update-kv-value-ops :rename-db-ident-ops block-ops].
  Ops carrying a :block-uuid in their value are block ops regardless of
  op-type; anything else must be :update-kv-value or :rename-db-ident."
  [ops]
  (let [classify (fn [[op-type _t value :as op]]
                   (cond
                     (:block-uuid value) :block-uuid
                     (= :update-kv-value op-type) :update-kv-value
                     (= :rename-db-ident op-type) :rename-db-ident
                     :else (throw (ex-info "invalid op" {:op op}))))
        grouped (group-by classify ops)]
    [(:update-kv-value grouped) (:rename-db-ident grouped) (:block-uuid grouped)]))
(defn add-ops!
  "Coerce OPS (see `ops-coercer`), partition them by kind, and persist them
  into REPO's client-ops DataScript db as pending-to-push operations.
  No-op when OPS is empty or the generated tx-data is empty.
  Asserts that the client-ops conn exists for REPO."
  [repo ops]
  (when (seq ops)
    (let [conn (worker-state/get-client-ops-conn repo)
          ops (ops-coercer ops)
          _ (assert (some? conn) repo)
          [update-kv-value-ops rename-db-ident-ops block-ops] (partition-ops ops)
          tx-data1 (when (seq block-ops) (generate-block-ops-tx-data @conn block-ops))
          tx-data2 (when (seq update-kv-value-ops) (generate-ident-kv-ops-tx-data @conn update-kv-value-ops))
          tx-data3 (when (seq rename-db-ident-ops) (generate-rename-db-ident-ops-tx-data rename-db-ident-ops))]
      (when-let [tx-data (not-empty (concat tx-data1 tx-data2 tx-data3))]
        (ldb/transact! conn tx-data)))))
(defn- get-all-block-ops*
  "Return e->op-map for every entity in DB that has a :block/uuid plus at
  least one attribute from `block-op-types`.
  op-map maps attribute -> stored value (including :block/uuid itself)."
  [db]
  (->> (d/datoms db :eavt)
       (group-by :e)
       (keep (fn [[e datoms]]
               (let [op-map (into {}
                                  (keep (fn [datom]
                                          (let [a (:a datom)]
                                            (when (or (keyword-identical? :block/uuid a)
                                                      (contains? block-op-types a))
                                              [a (:v datom)]))))
                                  datoms)]
                 (when (and (:block/uuid op-map)
                            ;; count>1 = contains some `block-op-types`
                            (> (count op-map) 1))
                   [e op-map]))))
       (into {})))

(defn- get-all-update-kv-value-ops*
  "Return e->op-map for entities carrying a :db-ident plus some
  `update-kv-value-op-types` attribute. Scans only entities found via the
  :db-ident avet index instead of the whole eavt index."
  [db]
  (let [db-ident-datoms (d/datoms db :avet :db-ident)
        es (map :e db-ident-datoms)]
    (->> (map (fn [e] [e (d/datoms db :eavt e)]) es)
         (keep (fn [[e datoms]]
                 (let [op-map (into {}
                                    (keep (fn [datom]
                                            (let [a (:a datom)]
                                              (when (or (keyword-identical? :db-ident a)
                                                        (contains? update-kv-value-op-types a))
                                                [a (:v datom)]))))
                                    datoms)]
                   ;; count>1 = ident entity actually has a pending op
                   (when (and (:db-ident op-map) (> (count op-map) 1))
                     [e op-map]))))
         (into {}))))

(defn- get-all-rename-db-ident-ops*
  "Return e->op-map for entities carrying :db-ident-or-block-uuid plus some
  `db-ident-rename-op-types` attribute. Mirrors get-all-update-kv-value-ops*."
  [db]
  (let [db-ident-or-block-uuid-datoms (d/datoms db :avet :db-ident-or-block-uuid)
        es (map :e db-ident-or-block-uuid-datoms)]
    (->> (map (fn [e] [e (d/datoms db :eavt e)]) es)
         (keep (fn [[e datoms]]
                 (let [op-map (into {}
                                    (keep (fn [datom]
                                            (let [a (:a datom)]
                                              (when (or (keyword-identical? :db-ident-or-block-uuid a)
                                                        (contains? db-ident-rename-op-types a))
                                                [a (:v datom)]))))
                                    datoms)]
                   (when (and (:db-ident-or-block-uuid op-map) (> (count op-map) 1))
                     [e op-map]))))
         (into {}))))
(defn- get&remove-all-block-ops*
  "Read all pending block op-maps from CONN, then retract their op attributes
  (the :block/uuid datom itself is kept). Returns the op-maps read BEFORE the
  retraction transaction."
  [conn]
  (let [e->op-map (get-all-block-ops* @conn)
        retract-all-tx-data (mapcat (fn [e] (map (fn [a] [:db.fn/retractAttribute e a]) block-op-types))
                                    (keys e->op-map))]
    (ldb/transact! conn retract-all-tx-data)
    (vals e->op-map)))

(defn- get&remove-all-update-kv-value-ops*
  "Same read-then-retract pattern as get&remove-all-block-ops*, for
  update-kv-value ops."
  [conn]
  (let [e->op-map (get-all-update-kv-value-ops* @conn)
        retract-all-tx-data (mapcat (fn [e] (map (fn [a] [:db.fn/retractAttribute e a]) update-kv-value-op-types))
                                    (keys e->op-map))]
    (ldb/transact! conn retract-all-tx-data)
    (vals e->op-map)))

(defn- get&remove-all-rename-db-ident-ops*
  "Same read-then-retract pattern as get&remove-all-block-ops*, for
  rename-db-ident ops."
  [conn]
  (let [e->op-map (get-all-rename-db-ident-ops* @conn)
        retract-all-tx-data (mapcat (fn [e] (map (fn [a] [:db.fn/retractAttribute e a]) db-ident-rename-op-types))
                                    (keys e->op-map))]
    (ldb/transact! conn retract-all-tx-data)
    (vals e->op-map)))
(defn get-all-block-ops
  "Return all pending block op values for REPO (without removing them),
  dropping the :block/uuid entries that only identify the entity.
  nil when REPO has no client-ops conn."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (mapcat
     (fn [m]
       (keep (fn [[k v]]
               (when (not= :block/uuid k) v))
             m))
     (vals (get-all-block-ops* @conn)))))

(comment
  (defn get-all-db-ident-kv-ops
    [repo]
    (when-let [conn (worker-state/get-client-ops-conn repo)]
      (get-all-db-ident-kv-ops* @conn))))

(defn get&remove-all-block-ops
  "Return coll of
  {:block/uuid ...
   :update ...
   :move ...
   ...}"
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (get&remove-all-block-ops* conn)))

(defn get&remove-all-update-kv-value-ops
  "Pop (read and retract) all pending update-kv-value op-maps for REPO."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (get&remove-all-update-kv-value-ops* conn)))

(defn get&remove-all-rename-db-ident-ops
  "Pop (read and retract) all pending rename-db-ident op-maps for REPO."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (get&remove-all-rename-db-ident-ops* conn)))

(defn get-unpushed-ops-count
  "except asset-ops"
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (+
     (count (get-all-block-ops* @conn))
     (count (get-all-rename-db-ident-ops* @conn))
     (count (get-all-update-kv-value-ops* @conn)))))
(defn rtc-db-graph?
  "Is RTC enabled"
  [repo]
  ;; NOTE(review): `(exists? js/process)` makes this always truthy under
  ;; node/electron-like environments regardless of graph-uuid — confirm intended.
  (or (exists? js/process)
      (some? (get-graph-uuid repo))))

(defn create-pending-block-ops-count-flow
  "Return a missionary flow emitting the current unpushed-ops count for REPO.
  Re-emits whenever the client-ops db changes (throttled), and once
  immediately on subscription. nil when no client-ops conn exists."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (let [db-updated-flow
          (m/observe
           (fn ctor [emit!]
             (d/listen! conn :create-pending-ops-count-flow #(emit! true))
             (emit! true)
             (fn dtor []
               (d/unlisten! conn :create-pending-ops-count-flow))))]
      (m/ap
        (let [_ (m/?> (c.m/throttle 200 db-updated-flow))]
          ;; throttle db-updated-flow, because `datom-count` is a time-consuming fn
          (get-unpushed-ops-count repo))))))
;;; asset ops
(defn add-asset-ops
  "Persist asset ops for REPO, resolving update/remove conflicts by op time t:
  - an :update-asset op is dropped if a stored :remove-asset op is newer;
    otherwise it is stored and any older :remove-asset op is retracted.
  - a :remove-asset op is dropped if a stored :update-asset op is newer;
    otherwise it is stored and any older :update-asset op is retracted."
  [repo asset-ops]
  (let [conn (worker-state/get-client-ops-conn repo)
        ops (ops-coercer asset-ops)]
    (assert (some? conn) repo)
    (letfn [(already-removed? [remove-op t]
              ;; op shape is [op-type t value]; `second` is the op's time t
              (some-> remove-op second (> t)))
            (update-after-remove? [update-op t]
              (some-> update-op second (> t)))]
      (doseq [op ops]
        (let [[op-type t value] op
              {:keys [block-uuid]} value
              exist-block-ops-entity (d/entity @conn [:block/uuid block-uuid])
              ;; e is only dereferenced when a conflicting op exists,
              ;; which implies the entity exists too
              e (:db/id exist-block-ops-entity)]
          (when-let [tx-data
                     (not-empty
                      (case op-type
                        :update-asset
                        (let [remove-asset-op (get exist-block-ops-entity :remove-asset)]
                          (when-not (already-removed? remove-asset-op t)
                            (cond-> [{:block/uuid block-uuid
                                      :update-asset op}]
                              remove-asset-op (conj [:db.fn/retractAttribute e :remove-asset]))))
                        :remove-asset
                        (let [update-asset-op (get exist-block-ops-entity :update-asset)]
                          (when-not (update-after-remove? update-asset-op t)
                            (cond-> [{:block/uuid block-uuid
                                      :remove-asset op}]
                              update-asset-op (conj [:db.fn/retractAttribute e :update-asset]))))))]
            (ldb/transact! conn tx-data)))))))
(defn add-all-exists-asset-as-ops
  "Register an :update-asset op (with t=1) for every asset block currently
  present in REPO's main graph, so existing assets get (re)synced."
  [repo]
  (let [conn (worker-state/get-datascript-conn repo)]
    (assert (some? conn))
    (let [asset-uuids (d/q '[:find [?block-uuid ...]
                             :where
                             [?b :block/uuid ?block-uuid]
                             [?b :logseq.property.asset/type]]
                           @conn)
          ops (mapv (fn [block-uuid]
                      [:update-asset 1 {:block-uuid block-uuid}])
                    asset-uuids)]
      (add-asset-ops repo ops))))
(defn- get-all-asset-ops*
  "Return e->op-map for entities with :block/uuid plus some `asset-op-types`
  attribute; same scan pattern as get-all-block-ops*."
  [db]
  (->> (d/datoms db :eavt)
       (group-by :e)
       (keep (fn [[e datoms]]
               (let [op-map (into {}
                                  (keep (fn [datom]
                                          (let [a (:a datom)]
                                            (when (or (keyword-identical? :block/uuid a) (contains? asset-op-types a))
                                              [a (:v datom)]))))
                                  datoms)]
                 (when (and (:block/uuid op-map)
                            ;; count>1 = contains some `asset-op-types`
                            (> (count op-map) 1))
                   [e op-map]))))
       (into {})))

(defn get-unpushed-asset-ops-count
  "Number of asset op-maps pending push for REPO; nil when no client-ops conn."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (count (get-all-asset-ops* @conn))))

(defn get-all-asset-ops
  "All pending asset op-maps for REPO (not removed); nil when no conn."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (vals (get-all-asset-ops* @conn))))

(defn remove-asset-op
  "Retract every asset-op attribute stored for ASSET-UUID in REPO."
  [repo asset-uuid]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (let [ent (d/entity @conn [:block/uuid asset-uuid])]
      (when-let [e (:db/id ent)]
        (ldb/transact! conn (map (fn [a] [:db.fn/retractAttribute e a]) asset-op-types))))))

(defn create-pending-asset-ops-count-flow
  "Return a flow emitting the pending asset-ops count for REPO whenever the
  client-ops db changes (throttled to 100ms); emits once on subscription."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (let [datom-count-fn (fn [db] (count (get-all-asset-ops* db)))
          db-updated-flow
          (m/observe
           (fn ctor [emit!]
             (d/listen! conn :create-pending-asset-ops-count-flow #(emit! true))
             (emit! true)
             (fn dtor []
               (d/unlisten! conn :create-pending-asset-ops-count-flow))))]
      (m/ap
        (let [_ (m/?> (c.m/throttle 100 db-updated-flow))]
          (datom-count-fn @conn))))))

View File

@@ -1,732 +0,0 @@
(ns frontend.worker.rtc.core
"Main(use missionary) ns for rtc related fns"
(:require [clojure.data :as data]
[datascript.core :as d]
[frontend.common.missionary :as c.m]
[frontend.common.thread-api :refer [def-thread-api]]
[frontend.worker-common.util :as worker-util]
[frontend.worker.rtc.asset :as r.asset]
[frontend.worker.rtc.branch-graph :as r.branch-graph]
[frontend.worker.rtc.client :as r.client]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.crypt :as rtc-crypt]
[frontend.worker.rtc.db :as rtc-db]
[frontend.worker.rtc.exception :as r.ex]
[frontend.worker.rtc.full-upload-download-graph :as r.upload-download]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.rtc.remote-update :as r.remote-update]
[frontend.worker.rtc.skeleton]
[frontend.worker.rtc.throttle :as r.throttle]
[frontend.worker.rtc.ws :as ws]
[frontend.worker.rtc.ws-util :as ws-util :refer [gen-get-ws-create-map--memoized]]
[frontend.worker.shared-service :as shared-service]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq.common.config :as common-config]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]
[malli.core :as ma]
[missionary.core :as m])
(:import [missionary Cancelled]))
;; malli schema for the rtc-state map exposed via create-rtc-state-flow
(def ^:private rtc-state-schema
  [:map
   [:ws-state {:optional true} [:enum :connecting :open :closing :closed]]])
(def ^:private rtc-state-validator (ma/validator rtc-state-schema))

;; unique marker used to signal "ws closed, reconnect" inside get-remote-updates
(def ^:private sentinel (js-obj))
(defn- get-remote-updates
  "Return a flow: receive messages from ws,
  and filter messages with :req-id=
  - `push-updates`
  - `online-users-updated`.
  - `push-asset-block-updates`"
  [get-ws-create-task]
  (m/ap
    (loop []
      (let [ws (m/? get-ws-create-task)
            x (try
                (m/?> (m/eduction
                       (filter (fn [data]
                                 (contains?
                                  #{"online-users-updated"
                                    "push-updates"
                                    "push-asset-block-updates"}
                                  (:req-id data))))
                       (ws/recv-flow ws)))
                ;; on ws close, yield the sentinel so the loop re-creates the ws
                (catch js/CloseEvent _
                  sentinel))]
        (if (identical? x sentinel)
          (recur)
          x)))))
(defn- create-pull-remote-updates-flow
  "Return a flow: emit to pull remote-updates.
  reschedule next emit(INTERVAL-MS later) every time RESCHEDULE-FLOW emit a value.
  TODO: add immediate-emit-flow arg,
  e.g. when mobile-app becomes active, trigger one pull-remote-updates"
  [interval-ms reschedule-flow & [_immediate-emit-flow]]
  (let [v {:type :pull-remote-updates}
        ;; periodic ticker: emits v every interval-ms forever
        clock-flow (m/ap
                     (loop []
                       (m/amb
                        (m/? (m/sleep interval-ms v))
                        (recur))))]
    (m/ap
      (m/amb
       v ;; one immediate emit on subscription
       ;; m/?< cancels+restarts the ticker whenever reschedule-flow emits
       (let [_ (m/?< (c.m/continue-flow reschedule-flow))]
         (try
           (m/?< clock-flow)
           (catch Cancelled _ (m/amb))))))))

(defn create-inject-users-info-flow
  "Return a flow: emit event if need to notify the server to inject users-info to graph."
  [repo online-users-updated-flow]
  (m/ap
    (if-let [conn (worker-state/get-datascript-conn repo)]
      (if-let [online-users (seq (m/?> online-users-updated-flow))]
        (let [user-uuid->user (into {} (map (juxt :user/uuid identity) online-users))
              user-blocks (keep (fn [user-uuid] (d/entity @conn [:block/uuid user-uuid])) (keys user-uuid->user))]
          ;; emit only when some online user has no local user-block, or a
          ;; local user-block's name/email/avatar disagrees with server info
          (if (or (not= (count user-blocks) (count user-uuid->user))
                  (some
                   ;; check if some attrs not equal among user-blocks and online-users
                   (fn [user-block]
                     (let [user (user-uuid->user (:block/uuid user-block))
                           [diff-r1 diff-r2]
                           (data/diff
                            (select-keys user-block [:logseq.property.user/name :logseq.property.user/email :logseq.property.user/avatar])
                            (update-keys
                             (select-keys user [:user/name :user/email :user/avatar])
                             (fn [k] (keyword "logseq.property.user" (name k)))))]
                       (or (some? diff-r1) (some? diff-r2))))
                   user-blocks))
            (m/amb {:type :inject-users-info}
                   ;; then trigger a pull-remote-updates to update local-graph
                   {:type :pull-remote-updates :from :x})
            (m/amb)))
        (m/amb))
      (m/amb))))
(defn- create-mixed-flow
  "Return a flow that emits all kinds of events:
  `:remote-update`: remote-updates data from server
  `:remote-asset-block-update`: remote asset-updates from server
  `:local-update-check`: event to notify to check if there're some new local-updates, then push to remote.
  `:online-users-updated`: online users info updated
  `:pull-remote-updates`: pull remote updates
  `:inject-users-info`: notify server to inject users-info into the graph
  `:assets-sync-loop-stopped`: assets-sync-loop stopped, rtc-loop should stop as well"
  [repo get-ws-create-task *auto-push? *online-users *assets-sync-loop-stopped?]
  (let [remote-updates-flow (m/eduction
                             (map (fn [data]
                                    (case (:req-id data)
                                      "push-updates" {:type :remote-update :value data}
                                      "online-users-updated" {:type :online-users-updated :value data}
                                      "push-asset-block-updates" {:type :remote-asset-block-update :value data})))
                             (get-remote-updates get-ws-create-task))
        local-updates-check-flow (m/eduction
                                  (map (fn [data] {:type :local-update-check :value data}))
                                  (r.throttle/create-local-updates-check-flow repo *auto-push? 2000))
        inject-user-info-flow (create-inject-users-info-flow repo (m/watch *online-users))
        ;; (take 1): one stop event is enough to bring the rtc-loop down
        assets-sync-loop-stopped-flow (m/eduction
                                       (keep (fn [v] (when v {:type :assets-sync-loop-stopped})))
                                       (take 1)
                                       (m/watch *assets-sync-loop-stopped?))
        mix-flow (c.m/mix remote-updates-flow
                          local-updates-check-flow
                          inject-user-info-flow
                          assets-sync-loop-stopped-flow)]
    ;; any event on mix-flow also reschedules the 60s periodic pull
    (c.m/mix mix-flow (create-pull-remote-updates-flow 60000 mix-flow))))

(defn- create-ws-state-flow
  "Flow of the current ws connection state; switches to the new ws's state
  flow whenever *current-ws changes. m/relieve drops intermediate states for
  slow consumers."
  [*current-ws]
  (m/relieve
   (m/ap
     (let [ws (m/?< (m/watch *current-ws))]
       (try
         (if ws
           (m/?< (ws/create-mws-state-flow ws))
           (m/amb))
         (catch Cancelled _
           (m/amb)))))))

(defn- create-rtc-state-flow
  "Wrap a ws-state flow into the rtc-state map shape, validated against
  rtc-state-schema."
  [ws-state-flow]
  (m/latest
   (fn [ws-state]
     {:post [(rtc-state-validator %)]}
     (cond-> {}
       ws-state (assoc :ws-state ws-state)))
   (m/reductions {} nil ws-state-flow)))

(defn- update-remote-schema-version!
  "Persist SERVER-SCHEMA-VERSION into the graph as :logseq.kv/remote-schema-version.
  Skips undo-ops and persist-op bookkeeping; no-op when version is nil."
  [conn server-schema-version]
  (when server-schema-version
    (ldb/transact! conn [(ldb/kv :logseq.kv/remote-schema-version server-schema-version)]
                   {:gen-undo-ops? false
                    :persist-op? false})))
;; global lock: non-nil while an rtc-loop is running
(defonce ^:private *rtc-lock (atom nil))
(defn- holding-rtc-lock
  "Use this fn to prevent multiple rtc-loops at same time.
  rtc-loop-task is stateless, but conn is not.
  we need to ensure that no two concurrent rtc-loop-tasks are modifying `conn` at the same time"
  [started-dfv task]
  (m/sp
    (when-not (compare-and-set! *rtc-lock nil true)
      (let [e (ex-info "Must not run multiple rtc-loops, try later"
                       {:type :rtc.exception/lock-failed
                        ;; :missionary/retry lets callers back off and retry
                        :missionary/retry true})]
        ;; deliver the failure to whoever awaits loop startup, then abort
        (started-dfv e)
        (throw e)))
    (try
      (m/? task)
      (finally
        (reset! *rtc-lock nil)))))
(def ^:private *graph-uuid->*online-users (atom {}))

(defn- get-or-create-*online-users
  "Return the shared online-users atom for GRAPH-UUID, creating it on first use.
  Uses a single swap! so lookup+create is one atomic step: the original
  check-then-swap could, under concurrent callers, create two atoms and hand
  out the one that was then overwritten. Asserts GRAPH-UUID is a uuid."
  [graph-uuid]
  (assert (uuid? graph-uuid) graph-uuid)
  ;; `update` keeps an existing atom and only allocates when absent
  (-> (swap! *graph-uuid->*online-users update graph-uuid #(or % (atom nil)))
      (get graph-uuid)))
(defn- task--update-*aes-key
  "Task: when DB marks this graph as e2ee, fetch the graph's AES key from the
  server and store it in *AES-KEY. Throws :rtc.exception/not-found-graph-aes-key
  when the server has no key for (user, graph). No-op for non-e2ee graphs."
  [get-ws-create-task db user-uuid graph-uuid *aes-key]
  (m/sp
    (when (ldb/get-graph-rtc-e2ee? db)
      (let [aes-key (m/? (rtc-crypt/task--get-aes-key get-ws-create-task user-uuid graph-uuid))]
        (when (nil? aes-key)
          (throw (ex-info "not found aes-key" {:type :rtc.exception/not-found-graph-aes-key
                                               :graph-uuid graph-uuid
                                               :user-uuid user-uuid})))
        (reset! *aes-key aes-key)))))
(declare new-task--inject-users-info)
(defn- ^:large-vars/cleanup-todo create-rtc-loop
  "Return a map with [:rtc-state-flow :rtc-loop-task :*rtc-auto-push? :onstarted-task]
  TODO: auto refresh token if needed"
  [graph-uuid schema-version repo conn token user-uuid
   & {:keys [auto-push? debug-ws-url] :or {auto-push? true}}]
  (let [major-schema-version (db-schema/major-version schema-version)
        ws-url (or debug-ws-url (ws-util/get-ws-url token))
        *auto-push? (atom auto-push?)
        *remote-profile? (atom false)
        *last-calibrate-t (atom nil)
        *online-users (get-or-create-*online-users graph-uuid)
        *assets-sync-loop-canceler (atom nil)
        *server-schema-version (atom nil)
        *aes-key (atom nil)
        *assets-sync-loop-stopped (atom nil)
        ;; dfv delivered once on startup: true on success, exception on failure
        started-dfv (m/dfv)
        add-log-fn (fn [type message]
                     (assert (map? message) message)
                     (rtc-log-and-state/rtc-log type (assoc message :graph-uuid graph-uuid)))
        {:keys [*current-ws] get-ws-create-task0 :get-ws-create-task}
        (gen-get-ws-create-map--memoized ws-url)
        ;; wrap the raw ws task so graph-updates registration happens once
        get-ws-create-task (r.client/ensure-register-graph-updates--memoized
                            get-ws-create-task0 graph-uuid major-schema-version repo conn
                            *last-calibrate-t *online-users *server-schema-version *aes-key add-log-fn)
        {:keys [assets-sync-loop-task]}
        (r.asset/create-assets-sync-loop
         repo get-ws-create-task graph-uuid major-schema-version conn *auto-push? *aes-key)
        mixed-flow (create-mixed-flow repo get-ws-create-task *auto-push? *online-users *assets-sync-loop-stopped)]
    (assert (some? *current-ws))
    {:rtc-state-flow (create-rtc-state-flow (create-ws-state-flow *current-ws))
     :*rtc-auto-push? *auto-push?
     :*rtc-remote-profile? *remote-profile?
     :*online-users *online-users
     :onstarted-task started-dfv
     :rtc-loop-task
     (holding-rtc-lock
      started-dfv
      (m/sp
        (try
          (log/info :rtc :loop-starting)
          ;; init run to open a ws
          (m/? (task--update-*aes-key get-ws-create-task0 @conn user-uuid graph-uuid *aes-key))
          (m/? get-ws-create-task)
          ;; NOTE: Set dfv after ws connection is established,
          ;; ensuring the ws connection is already up when the cloud-icon turns green.
          (started-dfv true)
          (update-remote-schema-version! conn @*server-schema-version)
          (reset! *assets-sync-loop-canceler
                  (c.m/run-task :assets-sync-loop-task
                                assets-sync-loop-task
                                :fail (fn [e]
                                        (log/info :assets-sync-loop-task-stopped e)
                                        (reset! *assets-sync-loop-stopped true))))
          ;; main event loop: consume mixed-flow until cancelled or a
          ;; non-recoverable exception escapes
          (->>
           (let [event (m/?> mixed-flow)]
             (case (:type event)
               (:remote-update :remote-asset-block-update)
               (try
                 (m/? (r.remote-update/task--apply-remote-update
                       graph-uuid repo conn event @*aes-key add-log-fn))
                 (catch :default e
                   ;; local graph too old -> full pull instead of incremental apply
                   (if (= :rtc.exception/local-graph-too-old (:type (ex-data e)))
                     (m/? (r.client/new-task--pull-remote-data
                           repo conn graph-uuid major-schema-version get-ws-create-task @*aes-key
                           add-log-fn))
                     (throw e))))
               :local-update-check
               (try
                 (m/? (r.client/new-task--push-local-ops
                       repo conn graph-uuid major-schema-version
                       get-ws-create-task *remote-profile? @*aes-key add-log-fn))
                 (catch :default e
                   (if (= :rtc.exception/local-graph-too-old (:type (ex-data e)))
                     (m/? (r.client/new-task--pull-remote-data
                           repo conn graph-uuid major-schema-version get-ws-create-task @*aes-key
                           add-log-fn))
                     (throw e))))
               :online-users-updated
               (reset! *online-users (:online-users (:value event)))
               :pull-remote-updates
               (m/? (r.client/new-task--pull-remote-data
                     repo conn graph-uuid major-schema-version get-ws-create-task @*aes-key
                     add-log-fn))
               :inject-users-info
               (m/? (new-task--inject-users-info token graph-uuid major-schema-version))
               :assets-sync-loop-stopped
               ;; assets-sync-loop stopped, then we should stop the whole rtc-loop
               (throw (ex-info "assets-sync-loop-stopped" {}))))
           (m/ap)
           (m/reduce {} nil)
           (m/?))
          (catch :default e
            (let [ex (r.ex/e->ex-info e)]
              (add-log-fn :rtc.log/cancelled {:e ex})
              (throw ex)))
          (finally
            (started-dfv :final) ;; ensure started-dfv can recv a value(values except the first one will be disregarded)
            (when @*assets-sync-loop-canceler (@*assets-sync-loop-canceler))))))}))
;; template/reset value for *rtc-loop-metadata; its key set is enforced by the
;; atom validator below so new keys must be added here first
(def ^:private empty-rtc-loop-metadata
  {:repo nil
   :graph-uuid nil
   :local-graph-schema-version nil
   :remote-graph-schema-version nil
   :user-uuid nil
   :rtc-state-flow nil
   :*rtc-auto-push? nil
   :*rtc-remote-profile? nil
   :*online-users nil
   :*rtc-lock nil
   :canceler nil
   :*last-stop-exception nil})

(def ^:private rtc-loop-metadata-keys (set (keys empty-rtc-loop-metadata)))

(defonce ^:private *rtc-loop-metadata (atom empty-rtc-loop-metadata
                                            :validator
                                            (fn [v] (= rtc-loop-metadata-keys (set (keys v))))))

(defn- validate-rtc-start-conditions
  "Return exception if validation failed"
  ;; on success returns {:conn :user-uuid :graph-uuid :schema-version
  ;; :remote-schema-version}; callers branch on `(instance? ExceptionInfo r)`
  [repo token]
  (if-let [conn (worker-state/get-datascript-conn repo)]
    (let [user-uuid (:sub (worker-util/parse-jwt token))
          graph-uuid (ldb/get-graph-rtc-uuid @conn)
          schema-version (ldb/get-graph-schema-version @conn)
          remote-schema-version (ldb/get-graph-remote-schema-version @conn)
          app-schema-version db-schema/version]
      (cond
        (not user-uuid)
        (ex-info "Invalid token" {:type :rtc.exception/invalid-token})
        (not graph-uuid)
        r.ex/ex-local-not-rtc-graph
        (not schema-version)
        (ex-info "Not found schema-version" {:type :rtc.exception/not-found-schema-version})
        (not remote-schema-version)
        (ex-info "Not found remote-schema-version" {:type :rtc.exception/not-found-remote-schema-version})
        ;; all three major versions (app, remote, local) must agree
        (apply not= (map db-schema/major-version [app-schema-version remote-schema-version schema-version]))
        (ex-info "major schema version mismatch" {:type :rtc.exception/major-schema-version-mismatched
                                                  :sub-type
                                                  (r.branch-graph/compare-schemas
                                                   remote-schema-version app-schema-version schema-version)
                                                  :app app-schema-version
                                                  :local schema-version
                                                  :remote remote-schema-version})
        :else
        {:conn conn
         :user-uuid user-uuid
         :graph-uuid graph-uuid
         :schema-version schema-version
         :remote-schema-version remote-schema-version}))
    (ex-info "Not found db-conn" {:type :rtc.exception/not-found-db-conn
                                  :repo repo})))
;;; ================ API ================
(defn- new-task--rtc-start*
  "Task: validate preconditions, spin up the rtc-loop for REPO, record its
  metadata in *rtc-loop-metadata. Returns nil on success, an ExceptionInfo on
  failure (validation or startup)."
  [repo token]
  (m/sp
    (let [{:keys [conn user-uuid graph-uuid schema-version remote-schema-version] :as r}
          (validate-rtc-start-conditions repo token)]
      (if (instance? ExceptionInfo r)
        r
        (let [{:keys [rtc-state-flow *rtc-auto-push? *rtc-remote-profile? rtc-loop-task *online-users onstarted-task]}
              (create-rtc-loop graph-uuid schema-version repo conn token user-uuid)
              *last-stop-exception (atom nil)
              canceler (c.m/run-task :rtc-loop-task
                                     rtc-loop-task
                                     :fail (fn [e]
                                             (reset! *last-stop-exception e)
                                             (log/info :rtc-loop-task e)
                                             (when-not (or (instance? Cancelled e) (= "missionary.Cancelled" (ex-message e)))
                                               (println (.-stack e)))
                                             (when (= :rtc.exception/ws-timeout (some-> e ex-data :type))
                                               ;; if fail reason is websocket-timeout, try to restart rtc
                                               (worker-state/<invoke-main-thread :thread-api/rtc-start-request repo))))
              ;; wait for loop startup (true on success, exception on failure)
              start-ex (m/? onstarted-task)]
          (if (instance? ExceptionInfo start-ex)
            (do
              (canceler)
              start-ex)
            (do (reset! *rtc-loop-metadata {:repo repo
                                            :graph-uuid graph-uuid
                                            :local-graph-schema-version schema-version
                                            :remote-graph-schema-version remote-schema-version
                                            :user-uuid user-uuid
                                            :rtc-state-flow rtc-state-flow
                                            :*rtc-auto-push? *rtc-auto-push?
                                            :*rtc-remote-profile? *rtc-remote-profile?
                                            :*online-users *online-users
                                            :*rtc-lock *rtc-lock
                                            :canceler canceler
                                            :*last-stop-exception *last-stop-exception})
                nil)))))))

(declare rtc-stop)

(defn new-task--rtc-start
  "Public entry task: start rtc for the current repo using the stored id-token.
  Skips (with a log) when repo/conn/token is missing. When STOP-BEFORE-START?
  is true, cancels any running loop first. Returns the startup exception (if
  any) after logging it by category."
  [stop-before-start?]
  (m/sp
    (let [repo (worker-state/get-current-repo)
          token (worker-state/get-id-token)
          conn (worker-state/get-datascript-conn repo)]
      (if-not (and repo conn token)
        (log/info :skip-new-task--rtc-start
                  {:repo repo
                   :some?-conn (some? conn)
                   :some?-token (some? token)})
        (do
          (when stop-before-start? (rtc-stop))
          (let [ex (m/? (new-task--rtc-start* repo token))]
            (when-let [ex-data* (ex-data ex)]
              (case (:type ex-data*)
                (:rtc.exception/not-rtc-graph
                 :rtc.exception/major-schema-version-mismatched
                 :rtc.exception/lock-failed)
                (log/info :rtc-start-failed ex)
                :rtc.exception/not-found-db-conn
                (log/error :rtc-start-failed ex)
                (log/error :BUG-unknown-error ex))
              ex)))))))
(defn rtc-stop
  "Cancel the running rtc-loop, if any (via the canceler stored in metadata)."
  []
  (when-let [canceler (:canceler @*rtc-loop-metadata)]
    (canceler)))

(defn rtc-toggle-auto-push
  "Flip the auto-push flag of the running rtc-loop; no-op when rtc is stopped."
  []
  (when-let [*auto-push? (:*rtc-auto-push? @*rtc-loop-metadata)]
    (swap! *auto-push? not)))

(defn rtc-toggle-remote-profile
  "Flip the remote-profile flag of the running rtc-loop; no-op when stopped."
  []
  (when-let [*rtc-remote-profile? (:*rtc-remote-profile? @*rtc-loop-metadata)]
    (swap! *rtc-remote-profile? not)))

(defn new-task--get-graphs
  "Task: list the user's remote graphs; returns the :graphs value of the reply."
  [token]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (m/join :graphs
            (ws-util/send&recv get-ws-create-task {:action "list-graphs"}))))

(defn new-task--delete-graph
  "Return a task that return true if succeed"
  [token graph-uuid schema-version]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (m/sp
      (let [{:keys [ex-data]}
            (m/? (ws-util/send&recv get-ws-create-task
                                    {:action "delete-graph"
                                     :graph-uuid graph-uuid
                                     :schema-version (str schema-version)}))]
        (if ex-data
          (log/info ::delete-graph-failed {:graph-uuid graph-uuid :ex-data ex-data})
          ;; Clean up rtc data in existing dbs so that the graph can be uploaded again
          (when-let [repo (worker-state/get-current-repo)]
            (when-let [conn (worker-state/get-datascript-conn repo)]
              (let [graph-id (ldb/get-graph-rtc-uuid @conn)]
                (when (= (str graph-id) (str graph-uuid))
                  (rtc-db/remove-rtc-data-in-conn! repo))))))
        (boolean (nil? ex-data))))))

(defn new-task--get-users-info
  "Return a task that return users-info about the graph."
  [token graph-uuid]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (m/join :users
            (ws-util/send&recv get-ws-create-task
                               {:action "get-users-info" :graph-uuid graph-uuid}))))

(defn new-task--inject-users-info
  "Task: ask the server to inject users-info blocks into the remote graph."
  [token graph-uuid major-schema-version]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (ws-util/send&recv get-ws-create-task
                       {:action "inject-users-info"
                        :graph-uuid graph-uuid
                        :schema-version (str major-schema-version)})))

(defn new-task--grant-access-to-others
  "Task: grant TARGET-USER-EMAIL access to GRAPH-UUID. For e2ee graphs the
  graph AES key is re-encrypted with the target user's public key and sent
  alongside. Throws the server's ex-data on failure."
  [token graph-uuid user-uuid target-user-email]
  (m/sp
    (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))
          encrypted-aes-key
          (m/? (rtc-crypt/task--encrypt-graph-aes-key-by-other-user-public-key
                get-ws-create-task graph-uuid user-uuid target-user-email))
          resp (m/? (ws-util/send&recv get-ws-create-task
                                       (cond-> {:action "grant-access"
                                                :graph-uuid graph-uuid
                                                :target-user-email+encrypted-aes-key-coll
                                                [{:user/email target-user-email
                                                  :encrypted-aes-key (ldb/write-transit-str encrypted-aes-key)}]})))]
      (when (:ex-data resp)
        (throw (ex-info (:ex-message resp) (:ex-data resp)))))))

(defn new-task--get-block-content-versions
  "Return a task that return map [:ex-data :ex-message :versions]"
  [token graph-uuid block-uuid]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (m/join :versions (ws-util/send&recv get-ws-create-task
                                         {:action "query-block-content-versions"
                                          :block-uuids [block-uuid]
                                          :graph-uuid graph-uuid}))))
;; flow of the full rtc debug-state map; re-derives whenever loop metadata or
;; any of its watched atoms/flows change
(def ^:private create-get-state-flow*
  (let [rtc-loop-metadata-flow (m/watch *rtc-loop-metadata)]
    (m/ap
      (let [{*rtc-lock' :*rtc-lock
             :keys [repo graph-uuid local-graph-schema-version remote-graph-schema-version
                    user-uuid rtc-state-flow *rtc-auto-push? *rtc-remote-profile?
                    *online-users *last-stop-exception]}
            (m/?< rtc-loop-metadata-flow)]
        (try
          ;; emit nothing until an rtc-loop has populated the metadata
          (if-not (and repo rtc-state-flow *rtc-auto-push? *rtc-lock')
            (m/amb)
            (m/?<
             (m/latest
              (fn [rtc-state rtc-auto-push? rtc-remote-profile?
                   rtc-lock online-users pending-local-ops-count pending-asset-ops-count
                   [local-tx remote-tx] last-stop-ex]
                {:graph-uuid graph-uuid
                 :local-graph-schema-version (db-schema/schema-version->string local-graph-schema-version)
                 :remote-graph-schema-version (db-schema/schema-version->string remote-graph-schema-version)
                 :user-uuid user-uuid
                 :unpushed-block-update-count pending-local-ops-count
                 :pending-asset-ops-count pending-asset-ops-count
                 :local-tx local-tx
                 :remote-tx remote-tx
                 :rtc-state rtc-state
                 :rtc-lock rtc-lock
                 :auto-push? rtc-auto-push?
                 :remote-profile? rtc-remote-profile?
                 :online-users online-users
                 :last-stop-exception-ex-data (some-> last-stop-ex ex-data)})
              rtc-state-flow
              (m/watch *rtc-auto-push?) (m/watch *rtc-remote-profile?)
              (m/watch *rtc-lock') (m/watch *online-users)
              (client-op/create-pending-block-ops-count-flow repo)
              (client-op/create-pending-asset-ops-count-flow repo)
              (rtc-log-and-state/create-local&remote-t-flow graph-uuid)
              (m/watch *last-stop-exception))))
          (catch Cancelled _ (m/amb)))))))

;; throttled variant consumed by subscribers (at most ~3 emits/second)
(def ^:private create-get-state-flow (c.m/throttle 300 create-get-state-flow*))

(defn new-task--get-debug-state
  "Task: one snapshot of the current rtc debug-state map."
  []
  (c.m/snapshot-of-flow create-get-state-flow))
(defn new-task--upload-graph
  "Task: upload REPO as a new remote graph named REMOTE-GRAPH-NAME.
  Returns an ExceptionInfo (instead of throwing) when conn or schema-version
  is missing."
  [token repo remote-graph-name]
  (let [{:keys [conn schema-version] :as r}
        (if-let [conn (worker-state/get-datascript-conn repo)]
          (if-let [schema-version (ldb/get-graph-schema-version @conn)]
            {:conn conn :schema-version schema-version}
            (ex-info "Not found schema-version" {:type :rtc.exception/not-found-schema-version}))
          (ex-info "Not found db-conn" {:type :rtc.exception/not-found-db-conn :repo repo}))]
    (m/sp
      (if (instance? ExceptionInfo r)
        r
        (let [major-schema-version (db-schema/major-version schema-version)
              {:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
          (m/? (r.upload-download/new-task--upload-graph
                get-ws-create-task repo conn remote-graph-name major-schema-version)))))))

(defn new-task--branch-graph
  "Task: branch REPO's existing rtc graph on the server. Same error-value
  convention as new-task--upload-graph; additionally requires the local graph
  to already have an rtc graph-uuid."
  [token repo]
  (let [{:keys [conn graph-uuid schema-version] :as r}
        (if-let [conn (worker-state/get-datascript-conn repo)]
          (if-let [graph-uuid (ldb/get-graph-rtc-uuid @conn)]
            (if-let [schema-version (ldb/get-graph-schema-version @conn)]
              {:conn conn :graph-uuid graph-uuid :schema-version schema-version}
              (ex-info "Not found schema-version" {:type :rtc.exception/not-found-schema-version}))
            r.ex/ex-local-not-rtc-graph)
          (ex-info "Not found db-conn" {:type :rtc.exception/not-found-db-conn :repo repo}))]
    (m/sp
      (if (instance? ExceptionInfo r)
        r
        (let [major-schema-version (db-schema/major-version schema-version)
              {:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
          (m/? (r.upload-download/new-task--branch-graph
                get-ws-create-task repo conn graph-uuid major-schema-version)))))))

(defn new-task--request-download-graph
  "Task: ask the server to prepare a download of GRAPH-UUID."
  [token graph-uuid schema-version]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (r.upload-download/new-task--request-download-graph get-ws-create-task graph-uuid schema-version)))

(comment
  (defn new-task--download-info-list
    [token graph-uuid schema-version]
    (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
      (r.upload-download/new-task--download-info-list get-ws-create-task graph-uuid schema-version))))

(defn new-task--wait-download-info-ready
  "Task: poll until the prepared download identified by DOWNLOAD-INFO-UUID is
  ready, or TIMEOUT-MS elapses."
  [token download-info-uuid graph-uuid schema-version timeout-ms]
  (let [{:keys [get-ws-create-task]} (gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
    (r.upload-download/new-task--wait-download-info-ready
     get-ws-create-task download-info-uuid graph-uuid schema-version timeout-ms)))

;; re-export: download a prepared graph snapshot from its S3 url
(def new-task--download-graph-from-s3 r.upload-download/new-task--download-graph-from-s3)
;; ---- thread-api registrations: expose the tasks above to the main thread ----

(def-thread-api :thread-api/rtc-start
  [stop-before-start?]
  (new-task--rtc-start stop-before-start?))

(def-thread-api :thread-api/rtc-stop
  []
  (rtc-stop))

(def-thread-api :thread-api/rtc-toggle-auto-push
  []
  (rtc-toggle-auto-push))

(def-thread-api :thread-api/rtc-toggle-remote-profile
  []
  (rtc-toggle-remote-profile))

(def-thread-api :thread-api/rtc-grant-graph-access
  [token graph-uuid user-uuid target-user-email]
  (new-task--grant-access-to-others token graph-uuid user-uuid target-user-email))

(def-thread-api :thread-api/rtc-get-graphs
  [token]
  (new-task--get-graphs token))

(def-thread-api :thread-api/rtc-delete-graph
  [token graph-uuid schema-version]
  (new-task--delete-graph token graph-uuid schema-version))

(def-thread-api :thread-api/rtc-get-users-info
  [token graph-uuid]
  (new-task--get-users-info token graph-uuid))

(def-thread-api :thread-api/rtc-get-block-content-versions
  [token graph-uuid block-uuid]
  (new-task--get-block-content-versions token graph-uuid block-uuid))

(def-thread-api :thread-api/rtc-get-debug-state
  []
  (new-task--get-debug-state))

(def-thread-api :thread-api/rtc-async-upload-graph
  [repo token remote-graph-name]
  (new-task--upload-graph token repo remote-graph-name))

(def-thread-api :thread-api/rtc-async-branch-graph
  [repo token]
  (new-task--branch-graph token repo))

(def-thread-api :thread-api/rtc-request-download-graph
  [token graph-uuid schema-version]
  (new-task--request-download-graph token graph-uuid schema-version))

(def-thread-api :thread-api/rtc-wait-download-graph-info-ready
  [token download-info-uuid graph-uuid schema-version timeout-ms]
  (new-task--wait-download-info-ready token download-info-uuid graph-uuid schema-version timeout-ms))

(def-thread-api :thread-api/rtc-download-graph-from-s3
  [graph-uuid graph-name s3-url]
  (new-task--download-graph-from-s3 graph-uuid graph-name s3-url))

(comment
  (def-thread-api :thread-api/rtc-download-info-list
    [token graph-uuid schema-version]
    (new-task--download-info-list token graph-uuid schema-version)))

;;; ================ API (ends) ================

;;; subscribe state ;;;
;; background task: broadcast every rtc debug-state emission to all clients
;; (disabled for publishing builds, which have no rtc)
(when-not common-config/PUBLISHING
  (c.m/run-background-task
   ::subscribe-state
   (m/reduce
    (fn [_ v]
      (shared-service/broadcast-to-clients! :rtc-sync-state v))
    create-get-state-flow)))
(comment
(do
(def user-uuid "7f41990d-2c8f-4f79-b231-88e9f652e072")
(def graph-uuid "ff7186c1-5903-4bc8-b4e9-ca23525b9983")
(def repo "logseq_db_4-23")
(def conn (worker-state/get-datascript-conn repo))
(def debug-ws-url "wss://ws-dev.logseq.com/rtc-sync?token=???")
(let [{:keys [rtc-state-flow *rtc-auto-push? rtc-loop-task]}
(create-rtc-loop user-uuid graph-uuid repo conn nil {:debug-ws-url debug-ws-url})
c (c.m/run-task rtc-loop-task :rtc-loop-task)]
(def cancel c)
(def rtc-state-flow rtc-state-flow)
(def *rtc-auto-push? *rtc-auto-push?)))
(cancel)
(do
(def a (atom 1))
(def f1 (m/watch a))
(def f2 (create-pull-remote-updates-flow 5000 f1))
(def cancel (c.m/run-task (m/reduce (fn [_ v] (prn :v v)) f2) :xxx)))
(defn sleep-emit [delays]
(m/ap (let [n (m/?> (m/seed delays))
r (m/? (m/sleep n n))]
(prn :xxx r (t/now))
r)))
(def cancel
((->> (m/sample vector
(m/latest identity (m/reductions {} 0 (sleep-emit [1000 1 2])))
(sleep-emit [2000 3000 1000]))
(m/reduce (fn [_ v] (prn :v v)))) prn prn))
(let [f (m/stream (m/ap (m/amb 1 2 3 4)))]
((m/reduce (fn [r v] (conj r v)) (m/reductions {} :xxx f)) prn prn)
((m/reduce (fn [r v] (conj r v)) f) prn prn)))

View File

@@ -1,278 +0,0 @@
(ns frontend.worker.rtc.crypt
"rtc e2ee related.
Each user has an RSA key pair.
Each graph has an AES key.
Server stores the encrypted AES key, public key, and encrypted private key."
(:require ["/frontend/idbkv" :as idb-keyval]
[clojure.string :as string]
[frontend.common.crypt :as crypt]
[frontend.common.file.opfs :as opfs]
[frontend.common.missionary :as c.m]
[frontend.common.thread-api :refer [def-thread-api]]
[frontend.worker.rtc.ws-util :as ws-util]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[missionary.core :as m]
[promesa.core :as p])
(:import [missionary Cancelled]))
;; idb store holding per-graph encrypted AES keys (shared "localforage" db).
(defonce ^:private store (delay (idb-keyval/newStore "localforage" "keyvaluepairs" 2)))

;; opfs file name used to persist the (encrypted) e2ee password.
(defonce ^:private e2ee-password-file "e2ee-password")

;; True when this worker was started by a native shell, detected from
;; query params ("electron=true"/"capacitor=true") in the worker's own href.
(defonce ^:private native-env?
  (let [href (try (.. js/self -location -href)
                  (catch :default _ nil))]
    (boolean (and (string? href)
                  (or (string/includes? href "electron=true")
                      (string/includes? href "capacitor=true"))))))

(defn- native-worker?
  "Whether we run inside a native (electron/capacitor) app's worker."
  []
  native-env?)
(defn- <native-save-password-text!
  "Persist the encrypted e2ee password via the native host (main-thread api)."
  [encrypted-text]
  (worker-state/<invoke-main-thread :thread-api/native-save-e2ee-password encrypted-text))

(defn- <native-read-password-text
  "Read the encrypted e2ee password back from the native host."
  []
  (worker-state/<invoke-main-thread :thread-api/native-get-e2ee-password))
(defn- <save-e2ee-password
  "Encrypt `password` with `refresh-token` and persist the transit-encoded
  ciphertext. Native hosts store it via the main thread; on native-save
  failure (or on web) it is written to an opfs file instead."
  [refresh-token password]
  (p/let [result (crypt/<encrypt-text-by-text-password refresh-token password)
          text (ldb/write-transit-str result)]
    (if (native-worker?)
      (-> (p/let [_ (<native-save-password-text! text)]
            nil)
          (p/catch (fn [e]
                     (log/error :native-save-e2ee-password {:error e})
                     ;; fall back to opfs when the native save fails
                     (opfs/<write-text! e2ee-password-file text))))
      (opfs/<write-text! e2ee-password-file text))))
(defn- <read-e2ee-password
  "Read the persisted e2ee password and decrypt it with `refresh-token`.
  Mirrors `<save-e2ee-password`: native-host storage when available,
  otherwise the opfs file."
  [refresh-token]
  (p/let [text (if (native-worker?)
                 (<native-read-password-text)
                 (opfs/<read-text! e2ee-password-file))
          data (ldb/read-transit-str text)
          password (crypt/<decrypt-text-by-text-password refresh-token data)]
    password))
(defn- <get-item
  "Read the value at key `k` from the idb store, keywordizing map keys."
  [k]
  (assert (and k @store))
  (p/let [r (idb-keyval/get k @store)]
    (js->clj r :keywordize-keys true)))

(defn- <set-item!
  "Write `value` at key `k` in the idb store."
  [k value]
  (assert (and k @store))
  (idb-keyval/set k value @store))
(defn- graph-encrypted-aes-key-idb-key
  "Idb key under which `repo`'s encrypted graph AES key is cached."
  [repo]
  (assert (some? repo))
  (str "rtc-encrypted-aes-key###" repo))
(defn- <import-public-key-transit-str
  "Decode a transit-encoded exported public key and import it.
  Return js/CryptoKey; nil when the transit string decodes to nil."
  [public-key-transit-str]
  (when-let [exported-public-key (ldb/read-transit-str public-key-transit-str)]
    (crypt/<import-public-key exported-public-key)))
(defn task--upload-user-rsa-key-pair
  "Uploads the user's RSA key pair to the server.
  `public-key` is exported + transit-encoded before sending; the already
  encrypted private key is transit-encoded as-is. Set `:reset-private-key`
  true to overwrite an existing one. Throws when the server responds with
  ex-data."
  [get-ws-create-task user-uuid public-key encrypted-private-key & {:keys [reset-private-key]
                                                                    :or {reset-private-key false}}]
  (m/sp
    (let [exported-public-key-str (ldb/write-transit-str (c.m/<? (crypt/<export-public-key public-key)))
          encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)
          response (m/? (ws-util/send&recv get-ws-create-task
                                           {:action "upload-user-rsa-key-pair"
                                            :user-uuid user-uuid
                                            :public-key exported-public-key-str
                                            :encrypted-private-key encrypted-private-key-str
                                            :reset-private-key reset-private-key}))]
      (when (:ex-data response)
        (throw (ex-info (:ex-message response) (:ex-data response)))))))
(defn task--reset-user-rsa-key-pair
  "Reset rsa-key-pair in server.
  Like `task--upload-user-rsa-key-pair` but uses the dedicated
  reset action. Throws on server ex-data."
  [get-ws-create-task user-uuid public-key encrypted-private-key]
  (assert (and public-key encrypted-private-key))
  (m/sp
    (let [exported-public-key-str (ldb/write-transit-str (c.m/<? (crypt/<export-public-key public-key)))
          encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)
          resp (m/? (ws-util/send&recv get-ws-create-task
                                       {:action "reset-user-rsa-key-pair"
                                        :user-uuid user-uuid
                                        :public-key exported-public-key-str
                                        :encrypted-private-key encrypted-private-key-str}))]
      (when (:ex-data resp)
        (throw (ex-info (:ex-message resp) (:ex-data resp)))))))
(defn task--fetch-user-rsa-key-pair
  "Fetches the user's RSA key pair from server.
  Return {:public-key CryptoKey, :encrypted-private-key [array,array,array]}
  Return nil if not exists.
  Throws :rtc.exception/fetch-user-rsa-key-pair-error on server ex-data."
  [get-ws-create-task user-uuid]
  (m/sp
    (let [response (m/? (ws-util/send&recv get-ws-create-task
                                           {:action "fetch-user-rsa-key-pair"
                                            :user-uuid user-uuid}))]
      (if (:ex-data response)
        (throw (ex-info (:ex-message response)
                        (assoc (:ex-data response)
                               :type :rtc.exception/fetch-user-rsa-key-pair-error)))
        (let [{:keys [public-key encrypted-private-key]} response]
          (when (and public-key encrypted-private-key)
            {:public-key (c.m/<? (<import-public-key-transit-str public-key))
             :encrypted-private-key (ldb/read-transit-str encrypted-private-key)}))))))
(defn- task--remote-fetch-graph-encrypted-aes-key
  "Fetch a graph's encrypted AES key from the server.
  Return nil if not exists.
  Throws :rtc.exception/fetch-graph-aes-key-error on server ex-data."
  [get-ws-create-task graph-uuid]
  (m/sp
    (let [response (m/? (ws-util/send&recv get-ws-create-task
                                           {:action "fetch-graph-encrypted-aes-key"
                                            :graph-uuid graph-uuid}))]
      (if (:ex-data response)
        (throw (ex-info (:ex-message response) (assoc (:ex-data response)
                                                      :type :rtc.exception/fetch-graph-aes-key-error)))
        (ldb/read-transit-str (:encrypted-aes-key response))))))
(defn task--fetch-graph-aes-key
  "Fetches the AES key for a graph, from indexeddb or server.
  Return nil if not exists.
  A server-fetched key is cached in indexeddb (still encrypted) so later
  calls avoid the network round-trip."
  [get-ws-create-task graph-uuid private-key]
  (m/sp
    (let [encrypted-aes-key (c.m/<? (<get-item (graph-encrypted-aes-key-idb-key graph-uuid)))]
      (if encrypted-aes-key
        (c.m/<? (crypt/<decrypt-aes-key private-key encrypted-aes-key))
        (when-let [encrypted-aes-key (m/? (task--remote-fetch-graph-encrypted-aes-key get-ws-create-task graph-uuid))]
          (let [aes-key (c.m/<? (crypt/<decrypt-aes-key private-key encrypted-aes-key))]
            (c.m/<? (<set-item! (graph-encrypted-aes-key-idb-key graph-uuid) encrypted-aes-key))
            aes-key))))))
(defn task--persist-graph-encrypted-aes-key
  "Cache `encrypted-aes-key` for `graph-uuid` in indexeddb."
  [graph-uuid encrypted-aes-key]
  (m/sp
    (c.m/<? (<set-item! (graph-encrypted-aes-key-idb-key graph-uuid) encrypted-aes-key))))

(defn task--generate-graph-aes-key
  "Generate a fresh AES key for a graph (task wrapper over the crypt api)."
  []
  (m/sp (c.m/<? (crypt/<generate-aes-key))))
(defn task--get-decrypted-rsa-key-pair
  "Fetch the user's RSA key pair and decrypt the private key.
  The private key is decrypted on the main thread (which holds the e2ee
  password), then imported back into a CryptoKey here."
  [get-ws-create-task user-uuid]
  (m/sp
    (let [{:keys [public-key encrypted-private-key]}
          (m/? (task--fetch-user-rsa-key-pair get-ws-create-task user-uuid))
          exported-private-key (c.m/<? (worker-state/<invoke-main-thread
                                        :thread-api/decrypt-user-e2ee-private-key encrypted-private-key))
          private-key (c.m/<? (crypt/<import-private-key exported-private-key))]
      {:public-key public-key
       :private-key private-key})))
(defn task--get-aes-key
  "Resolve the graph's AES key via the user's decrypted private key.
  Return nil if not exists"
  [get-ws-create-task user-uuid graph-uuid]
  (m/sp
    (let [{:keys [_public-key private-key]} (m/? (task--get-decrypted-rsa-key-pair get-ws-create-task user-uuid))]
      (m/? (task--fetch-graph-aes-key get-ws-create-task graph-uuid private-key)))))
(defn task--reset-user-rsa-private-key
  "Re-encrypt the user's RSA private key under `new-password` and upload it.
  Throw if decrypt encrypted-private-key failed.
  On success the new password is persisted locally via `<save-e2ee-password`."
  [get-ws-create-task refresh-token user-uuid old-password new-password]
  (m/sp
    (let [{:keys [public-key encrypted-private-key]}
          (m/? (task--fetch-user-rsa-key-pair get-ws-create-task user-uuid))
          private-key (c.m/<? (crypt/<decrypt-private-key old-password encrypted-private-key))
          new-encrypted-private-key (c.m/<? (crypt/<encrypt-private-key new-password private-key))]
      (m/? (task--upload-user-rsa-key-pair get-ws-create-task user-uuid public-key new-encrypted-private-key
                                           :reset-private-key true))
      (c.m/<? (<save-e2ee-password refresh-token new-password)))))
(defn- task--fetch-user-rsa-public-key
  "Fetches the user's RSA public-key from server, looked up by email.
  Return js/CryptoKey.
  Return nil if not exists.
  Throws :rtc.exception/fetch-user-rsa-public-key-error on server ex-data."
  [get-ws-create-task user-email]
  (m/sp
    (let [{:keys [public-key] :as response}
          (m/? (ws-util/send&recv get-ws-create-task
                                  {:action "fetch-user-rsa-public-key"
                                   :user/email user-email}))]
      (if (:ex-data response)
        (throw (ex-info (:ex-message response)
                        (assoc (:ex-data response)
                               :type :rtc.exception/fetch-user-rsa-public-key-error)))
        (when public-key
          (c.m/<? (<import-public-key-transit-str public-key)))))))
(defn task--encrypt-graph-aes-key-by-other-user-public-key
  "Return encrypted-aes-key,
  which is decrypted by current user's private-key, then encrypted with
  other-user's public-key (graph-sharing flow).
  Return nil when either the graph key or the other user's key is missing."
  [get-ws-create-task graph-uuid user-uuid other-user-email]
  (m/sp
    (when-let [graph-aes-key (m/? (task--get-aes-key get-ws-create-task user-uuid graph-uuid))]
      (when-let [public-key (m/? (task--fetch-user-rsa-public-key get-ws-create-task other-user-email))]
        (c.m/<? (crypt/<encrypt-aes-key public-key graph-aes-key))))))
;; Thread-api: return the user's key pair in exportable (serializable)
;; form, or nil when the server has none.
(def-thread-api :thread-api/get-user-rsa-key-pair
  [token user-uuid]
  (m/sp
    (let [{:keys [get-ws-create-task]} (ws-util/gen-get-ws-create-map--memoized (ws-util/get-ws-url token))
          {:keys [public-key encrypted-private-key]}
          (m/? (task--fetch-user-rsa-key-pair get-ws-create-task user-uuid))]
      (when (and public-key encrypted-private-key)
        {:public-key (c.m/<? (crypt/<export-public-key public-key))
         :encrypted-private-key encrypted-private-key}))))

;; Thread-api: first-time key-pair setup. No-op when the server already
;; has a key pair. Asks the main thread for an e2ee password, encrypts the
;; fresh private key with it, uploads, then persists the password locally.
;; NOTE(review): exceptions are caught and returned as values (not
;; rethrown) — presumably so they cross the thread boundary; confirm.
(def-thread-api :thread-api/init-user-rsa-key-pair
  [token refresh-token user-uuid]
  (m/sp
    (try
      (let [{:keys [get-ws-create-task]} (ws-util/gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
        (when-not (m/? (task--fetch-user-rsa-key-pair get-ws-create-task user-uuid))
          (let [{:keys [publicKey privateKey]} (c.m/<? (crypt/<generate-rsa-key-pair))
                {:keys [password]} (c.m/<? (worker-state/<invoke-main-thread :thread-api/request-e2ee-password))
                encrypted-private-key (c.m/<? (crypt/<encrypt-private-key password privateKey))]
            (m/? (task--upload-user-rsa-key-pair get-ws-create-task user-uuid publicKey encrypted-private-key))
            (c.m/<? (<save-e2ee-password refresh-token password))
            nil)))
      (catch Cancelled _)
      (catch :default e e))))

;; Thread-api: replace the key pair with a freshly generated one protected
;; by `new-password` — only when one already exists remotely.
(def-thread-api :thread-api/reset-user-rsa-key-pair
  [token refresh-token user-uuid new-password]
  (m/sp
    (try
      (let [{:keys [get-ws-create-task]} (ws-util/gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
        (when (some? (m/? (task--fetch-user-rsa-key-pair get-ws-create-task user-uuid)))
          (let [{:keys [publicKey privateKey]} (c.m/<? (crypt/<generate-rsa-key-pair))
                encrypted-private-key (c.m/<? (crypt/<encrypt-private-key new-password privateKey))]
            (m/? (task--reset-user-rsa-key-pair get-ws-create-task user-uuid publicKey encrypted-private-key))
            (c.m/<? (<save-e2ee-password refresh-token new-password))
            nil)))
      (catch Cancelled _)
      (catch :default e e))))
;; Thread-api: change the e2ee password by re-encrypting the private key.
(def-thread-api :thread-api/change-e2ee-password
  [token refresh-token user-uuid old-password new-password]
  (m/sp
    (let [{:keys [get-ws-create-task]} (ws-util/gen-get-ws-create-map--memoized (ws-util/get-ws-url token))]
      (m/? (task--reset-user-rsa-private-key get-ws-create-task refresh-token user-uuid old-password new-password)))))

;; Thread-api: read the locally persisted e2ee password.
;; NOTE(review): on failure an ex-info is returned (not thrown) — looks
;; intended for crossing the thread boundary as a value; confirm.
(def-thread-api :thread-api/get-e2ee-password
  [refresh-token]
  (-> (p/let [password (<read-e2ee-password refresh-token)]
        {:password password})
      (p/catch (fn [e]
                 (log/error :read-e2ee-password e)
                 (ex-info ":thread-api/get-e2ee-password" {})))))

;; Thread-api: persist the e2ee password locally (encrypted by refresh-token).
(def-thread-api :thread-api/save-e2ee-password
  [refresh-token password]
  (<save-e2ee-password refresh-token password))

View File

@@ -1,27 +0,0 @@
(ns frontend.worker.rtc.db
"rtc db ops"
(:require [datascript.core :as d]
[frontend.worker.state :as worker-state]
[logseq.db :as ldb]))
(defn remove-rtc-data-from-local-db!
  "Retract rtc bookkeeping entities (graph-uuid, local-tx, remote schema
  version, e2ee flag) from `repo`'s datascript db, if the conn exists."
  [repo]
  (when-let [conn (worker-state/get-datascript-conn repo)]
    (ldb/transact! conn [[:db/retractEntity :logseq.kv/graph-uuid]
                         [:db/retractEntity :logseq.kv/graph-local-tx]
                         [:db/retractEntity :logseq.kv/remote-schema-version]
                         [:db/retractEntity :logseq.kv/graph-rtc-e2ee?]])))
(defn reset-client-op-conn
  "Retract every entity in `repo`'s client-ops db that carries
  :graph-uuid, :local-tx or :block/uuid, clearing pending rtc ops."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (let [tx-data (->> (concat (d/datoms @conn :avet :graph-uuid)
                               (d/datoms @conn :avet :local-tx)
                               (d/datoms @conn :avet :block/uuid))
                       (map (fn [datom] [:db/retractEntity (:e datom)])))]
      (ldb/transact! conn tx-data))))
(defn remove-rtc-data-in-conn!
  "Remove all rtc state for `repo`: local-db bookkeeping and client ops."
  [repo]
  (remove-rtc-data-from-local-db! repo)
  (reset-client-op-conn repo))

View File

@@ -1,27 +0,0 @@
(ns frontend.worker.rtc.db-listener
"listen datascript changes, infer operations from the db tx-report"
(:require [frontend.worker.db-listener :as db-listener]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.debug-log :as rtc-debug-log]
[frontend.worker.rtc.gen-client-op :as gen-client-op]
[frontend.worker.state :as worker-state]))
(comment
;; TODO: make it a qualified-keyword
(defkeywords
:persist-op? {:doc "tx-meta option, generate rtc ops when not nil (default true)"}))
;; Generate rtc client-ops from a datascript tx-report. Skipped when the
;; graph is not an rtc graph, when the tx is marked :persist-op? false,
;; or when the newer db-sync is enabled.
(defmethod db-listener/listen-db-changes :gen-rtc-ops
  [_
   {:keys [repo same-entity-datoms-coll id->same-entity-datoms]}
   {:keys [tx-data tx-meta db-before db-after]}]
  (when (and (client-op/rtc-db-graph? repo)
             (:persist-op? tx-meta true)
             (not (:enabled? @worker-state/*db-sync-config)))
    (rtc-debug-log/log-tx! repo tx-data tx-meta)
    (let [e->a->add?->v->t (update-vals
                            id->same-entity-datoms
                            gen-client-op/entity-datoms=>a->add?->v->t)
          ops (gen-client-op/generate-rtc-ops db-before db-after same-entity-datoms-coll e->a->add?->v->t)]
      (when (seq ops)
        (client-op/add-ops! repo ops)))))

View File

@@ -1,61 +0,0 @@
(ns frontend.worker.rtc.debug-log
"RTC debug logging stored in per-graph sqlite db."
(:require [frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]))
(defn create-tables!
  "Create the tx_log and messages debug tables if absent (no-op on nil db)."
  [^js db]
  (when db
    (.exec db "CREATE TABLE IF NOT EXISTS tx_log (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, tx_data TEXT, tx_meta TEXT)")
    (.exec db "CREATE TABLE IF NOT EXISTS messages (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, direction TEXT NOT NULL, message TEXT NOT NULL)")))
(defn reset-tables!
  "Drop and re-create both debug tables, clearing all logged data."
  [^js db]
  (when db
    (.exec db "DROP TABLE IF EXISTS tx_log")
    (.exec db "DROP TABLE IF EXISTS messages"))
  (create-tables! db))
(defn gc!
  "Trim each debug table to its newest 10000 rows, then VACUUM.
  Per-table failures are logged and do not abort the other table."
  [^js db]
  (when db
    (doseq [table ["tx_log" "messages"]]
      (try
        (.exec db (str "DELETE FROM " table " WHERE id <= (SELECT id FROM " table " ORDER BY id DESC LIMIT 1 OFFSET 10000)"))
        (catch :default e
          (log/error :rtc-debug-log-gc-failed {:table table :error e}))))
    (.exec db "VACUUM")))
(defn- safe-str
  "Render `v` with pr-str, falling back to str if printing throws."
  [v]
  (try
    (pr-str v)
    (catch :default _e
      (str v))))
(defn- insert!
  "Run a parameterized statement on the sqlite db; failures are logged,
  never thrown (debug logging must not break the caller)."
  [^js db sql params]
  (try
    (.exec db #js {:sql sql
                   :bind (clj->js params)})
    (catch :default e
      (log/error :rtc-debug-log-insert-failed e))))
(defn log-tx!
  "Append a tx-report (printed tx-data + tx-meta) to the tx_log table of
  `repo`'s :debug-log sqlite db, when that db is open."
  [repo tx-data tx-meta]
  (when repo
    (when-let [db (worker-state/get-sqlite-conn repo :debug-log)]
      (insert! db
               "INSERT INTO tx_log (tx_data, tx_meta) VALUES (?1, ?2)"
               [(safe-str tx-data) (safe-str tx-meta)])
      (log/debug :log-tx tx-meta))))
(defn log-ws-message!
  "Append a websocket message to the messages table.
  `direction` is a keyword stored by name; the 2-arity defaults `repo`
  to the current repo."
  ([direction message]
   (log-ws-message! (worker-state/get-current-repo) direction message))
  ([repo direction message]
   (when (and repo message)
     (when-let [db (worker-state/get-sqlite-conn repo :debug-log)]
       (insert! db
                "INSERT INTO messages (direction, message) VALUES (?1, ?2)"
                [(name direction) (str message)])))))

View File

@@ -1,64 +0,0 @@
(ns frontend.worker.rtc.exception
"Exception list"
(:require [logseq.common.defkeywords :refer [defkeywords]])
(:import [missionary Cancelled]))
;; Declares every rtc exception keyword with its doc.
;; Fix: :rtc.exception/unknown-server-error was used by
;; `ex-unknown-server-error` but never declared here; declare it too.
(defkeywords
  :rtc.exception/ws-already-disconnected {:doc "Remote exception. current websocket conn is already disconnected and deleted by remote."}
  :rtc.exception/remote-graph-not-exist {:doc "Remote exception. e.g. push client-updates to a deleted graph."}
  :rtc.exception/remote-graph-not-ready {:doc "Remote exception. Remote graph is still creating."}
  :rtc.exception/remote-graph-lock-missing {:doc "
Remote exception. Failed to remote graph lock isn't exist.
It's a server internal error, shouldn't happen."}
  :rtc.exception/invalid-token {:doc "Local exception"}
  :rtc.exception/not-rtc-graph {:doc "Local exception. Trying to start rtc loop on a local-graph."}
  :rtc.exception/lock-failed {:doc "Local exception.
Trying to start rtc loop but there's already one running, need to cancel that one first."}
  :rtc.exception/not-found-db-conn {:doc "Local exception. Cannot find db-conn by repo"}
  :rtc.exception/not-found-schema-version {:doc "Local exception. graph doesn't have :logseq.kv/schema-version value"}
  :rtc.exception/not-found-remote-schema-version {:doc "Local exception.
graph doesn't have :logseq.kv/remote-schema-version value"}
  :rtc.exception/major-schema-version-mismatched {:doc "Local exception.
local-schema-version, remote-schema-version, app-schema-version are not equal, cannot start rtc"}
  :rtc.exception/local-graph-too-old {:doc "Local exception.
Local graph's tx is too old, need to pull earlier remote-data first"}
  :rtc.exception/get-s3-object-failed {:doc "Failed to fetch response from s3.
When response from remote is too huge(> 32KB),
the server will put it to s3 and return its presigned-url to clients."}
  :rtc.exception/bad-request-body {:doc "bad request body, rejected by server-schema"}
  :rtc.exception/not-allowed {:doc "this api-call is not allowed"}
  :rtc.exception/ws-timeout {:doc "websocket timeout"}
  :rtc.exception/fetch-user-rsa-key-pair-error {:doc "Failed to fetch user RSA key pair from server"}
  :rtc.exception/fetch-user-rsa-public-key-error {:doc "Failed to fetch user RSA public-key from server"}
  :rtc.exception/fetch-graph-aes-key-error {:doc "Failed to fetch graph AES key from server"}
  :rtc.exception/not-found-user-rsa-key-pair {:doc "user rsa-key-pair not found"}
  :rtc.exception/not-found-graph-aes-key {:doc "graph aes-key not found"}
  :rtc.exception/read-asset-failed {:doc "read asset from fs failed, maybe not exists"}
  :rtc.exception/upload-asset-failed {:doc "upload asset failed"}
  :rtc.exception/download-asset-failed {:doc "download asset failed"}
  :rtc.exception/unknown-server-error {:doc "Unrecognized error response from server"})
;; Pre-built exceptions for common rtc failure cases.
(def ex-remote-graph-lock-missing
  (ex-info "remote graph lock missing(server internal error)"
           {:type :rtc.exception/remote-graph-lock-missing}))

(def ex-local-not-rtc-graph
  (ex-info "RTC is not supported for this local-graph" {:type :rtc.exception/not-rtc-graph}))

(def ex-unknown-server-error
  (ex-info "Unknown server error" {:type :rtc.exception/unknown-server-error}))
(defn e->ex-info
  "Normalize an arbitrary error value into an ExceptionInfo.
  Handles missionary Cancelled, js/CloseEvent and missionary race
  failures (recursively normalizing the nested errors); anything else
  is returned unchanged."
  [e]
  (cond
    (instance? Cancelled e) (ex-info "missionary.Cancelled" {:message (.-message e)})
    (instance? js/CloseEvent e) (ex-info "js/CloseEvent" {:type (.-type e)})
    ;; m/race-failure
    (and (instance? ExceptionInfo e)
         (contains? (ex-data e) :missionary.core/errors))
    (ex-info (ex-message e) (update (ex-data e) :missionary.core/errors (fn [errors] (map e->ex-info errors))))
    :else e))

View File

@@ -1,638 +0,0 @@
(ns frontend.worker.rtc.full-upload-download-graph
"- upload local graph to remote
- download remote graph"
(:require [cljs-http-missionary.client :as http]
[clojure.set :as set]
[datascript.core :as d]
[frontend.common.crypt :as crypt]
[frontend.common.missionary :as c.m]
[frontend.common.thread-api :as thread-api]
[frontend.worker-common.util :as worker-util]
[frontend.worker.db-metadata :as worker-db-metadata]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.const :as rtc-const]
[frontend.worker.rtc.crypt :as rtc-crypt]
[frontend.worker.rtc.db :as rtc-db]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.rtc.ws-util :as ws-util]
[frontend.worker.shared-service :as shared-service]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db.frontend.malli-schema :as db-malli-schema]
[logseq.db.sqlite.util :as sqlite-util]
[logseq.outliner.pipeline :as outliner-pipeline]
[malli.core :as ma]
[malli.error :as me]
[malli.transform :as mt]
[missionary.core :as m]
[promesa.core :as p]))
(def ^:private normalized-remote-block-schema
  "Blocks stored in remote have some differences in format from the client's.
  Use this schema's coercer to decode."
  [:map
   ;; remote db/id strings are kept as strings (used later as temp-ids)
   [:db/id [:string {:decode/custom str}]]
   [:db/ident {:optional true} :keyword]
   [:block/uuid {:optional true} [:uuid {:decode/custom ldb/read-transit-str}]]
   [:block/order {:optional true} db-malli-schema/block-order]
   [:db/cardinality {:optional true} :keyword]
   [:db/valueType {:optional true} :keyword]
   [:db/index {:optional true} :boolean]
   ;; TODO: remove :block/name special custom-decode later
   [:block/name {:optional true} [:string {:decode/custom
                                           (fn [v] (try (ldb/read-transit-str v)
                                                        (catch :default _
                                                          (log/warn :non-transit-block-name v)
                                                          v)))}]]
   ;; any other attr: ref values become db-id strings, transit strings decode
   [:malli.core/default [:map-of :keyword
                         [:any {:decode/custom
                                (fn [x] ; convert db-id to db-id-string(as temp-id)
                                  (cond
                                    (and (coll? x)
                                         (every? :db/id x))
                                    (map (comp str :db/id) x)
                                    (:db/id x)
                                    (str (:db/id x))
                                    (string? x)
                                    (ldb/read-transit-str x)
                                    (and (coll? x)
                                         (every? string? x))
                                    (map ldb/read-transit-str x)
                                    :else x))}]]]])

;; Coercer applying the schema above to a block sequence; fails with the
;; offending value on decode errors.
(def ^:private normalized-remote-blocks-coercer
  (ma/coercer [:sequential normalized-remote-block-schema]
              (mt/transformer {:name :custom} mt/string-transformer)
              nil
              #(ma/-fail! ::normalized-remote-block-schema (select-keys % [:value]))))
(defn- schema->ref-type-attrs
  "Set of attr names in `db-schema` whose :db/valueType is :db.type/ref."
  [db-schema]
  (into #{}
        (keep (fn [[attr-name attr-body]]
                (when (= :db.type/ref (:db/valueType attr-body))
                  attr-name)))
        db-schema))
(defn- schema->card-many-attrs
  "Set of attr names in `db-schema` declared :db.cardinality/many."
  [db-schema]
  (into #{}
        (keep (fn [[attr-name attr-body]]
                (when (= :db.cardinality/many (:db/cardinality attr-body))
                  attr-name)))
        db-schema))
(defn- export-as-blocks
  "Walk every datom of `db` and group them into remote-format block maps
  (one per entity, :db/id stringified as a temp-id).
  - ref values become db-id strings; non-ref values are transit-encoded
  - card-many values accumulate via conj
  - entities whose :db/ident is in `ignore-entity-set` are dropped whole
  - attrs in `ignore-attr-set` are skipped
  Throws on a :block/parent value that isn't a positive int (corrupt data)."
  [db & {:keys [ignore-attr-set ignore-entity-set]}]
  (let [datoms (d/datoms db :eavt)
        db-schema (d/schema db)
        card-many-attrs (schema->card-many-attrs db-schema)
        ref-type-attrs (schema->ref-type-attrs db-schema)]
    (->> datoms
         (partition-by :e)
         (keep (fn [datoms]
                 (when (seq datoms)
                   (reduce
                    (fn [r datom]
                      (when (and (contains? #{:block/parent} (:a datom))
                                 (not (pos-int? (:v datom))))
                        (throw (ex-info "invalid block data" {:datom datom})))
                      (let [a (:a datom)]
                        (cond
                          (contains? ignore-attr-set a) r
                          ;; ignored entity: abandon this whole block
                          (and (keyword-identical? :db/ident a)
                               (contains? ignore-entity-set (:v datom)))
                          (reduced nil)
                          :else
                          (let [card-many? (contains? card-many-attrs a)
                                ref? (contains? ref-type-attrs a)]
                            (case [ref? card-many?]
                              [true true]
                              (update r a conj (str (:v datom)))
                              [true false]
                              (assoc r a (str (:v datom)))
                              [false true]
                              (update r a conj (ldb/write-transit-str (:v datom)))
                              [false false]
                              (assoc r a (ldb/write-transit-str (:v datom))))))))
                    {:db/id (str (:e (first datoms)))}
                    datoms))))
         ;; these two attrs are sent in original (non-transit) form
         (map (fn [block]
                (cond-> block
                  (:db/ident block) (update :db/ident ldb/read-transit-str)
                  (:block/order block) (update :block/order ldb/read-transit-str)))))))
(defn- task--encrypt-blocks
  "Sequentially encrypt `encrypt-attr-set` attrs of each block with
  `encrypt-key`; returns the vector of encrypted blocks."
  [encrypt-key encrypt-attr-set blocks]
  (m/sp
    (loop [[block & rest-blocks] blocks
           result []]
      (if-not block
        result
        (let [block' (c.m/<? (crypt/<encrypt-map encrypt-key encrypt-attr-set block))]
          (recur rest-blocks (conj result block')))))))
(comment
(def db @(frontend.worker.state/get-datascript-conn (frontend.worker.state/get-current-repo)))
(def blocks (export-as-blocks db))
(def salt (rtc-encrypt/gen-salt))
(def canceler ((m/sp
(let [k (c.m/<? (rtc-encrypt/<salt+password->key salt "password"))]
(m/? (task--encrypt-blocks k #{:block/title :block/name} blocks))))
#(def encrypted-blocks %) prn)))
(defn new-task--upload-graph
  "Upload the local graph `repo`/`conn` as a new remote graph.
  Steps: generate a graph AES key, encrypt it with the user's RSA public
  key, export + encrypt all blocks, PUT them to a presigned s3 url, then
  ask the server to create the graph. On success records graph-uuid and
  local-tx bookkeeping locally and returns {:graph-uuid ...}.
  Throws when the user has no public key or the server rejects the upload."
  [get-ws-create-task repo conn remote-graph-name major-schema-version]
  (m/sp
    (rtc-log-and-state/rtc-log :rtc.log/upload {:sub-type :generate-aes-key
                                                :message "generate aes-encrypt-key"})
    (let [aes-key (m/? (rtc-crypt/task--generate-graph-aes-key))
          user-uuid (some-> (worker-state/get-id-token)
                            worker-util/parse-jwt
                            :sub)
          public-key (when user-uuid
                       (:public-key (m/? (rtc-crypt/task--fetch-user-rsa-key-pair get-ws-create-task user-uuid))))]
      (when-not public-key
        (throw (ex-info "user public-key not found" {:type :rtc.exception/not-found-user-rsa-key-pair
                                                     :user-uuid user-uuid})))
      (let [encrypted-aes-key (c.m/<? (crypt/<encrypt-aes-key public-key aes-key))
            _ (ldb/transact! conn [(ldb/kv :logseq.kv/graph-rtc-e2ee? true)])
            _ (rtc-log-and-state/rtc-log :rtc.log/upload {:sub-type :fetching-presigned-put-url
                                                          :message "fetching presigned put-url"})
            ;; fetch the put-url and serialize+encrypt blocks concurrently
            [{:keys [url key]} all-blocks-str]
            (m/?
             (m/join
              vector
              (ws-util/send&recv get-ws-create-task {:action "presign-put-temp-s3-obj"})
              (m/sp
                (let [all-blocks (export-as-blocks
                                  @conn
                                  :ignore-attr-set rtc-const/ignore-attrs-when-init-upload
                                  :ignore-entity-set rtc-const/ignore-entities-when-init-upload)
                      encrypted-blocks (c.m/<? (task--encrypt-blocks aes-key rtc-const/encrypt-attr-set all-blocks))]
                  (ldb/write-transit-str encrypted-blocks)))))]
        (rtc-log-and-state/rtc-log :rtc.log/upload {:sub-type :upload-data
                                                    :message "uploading data"})
        (m/? (http/put url {:body all-blocks-str :with-credentials? false}))
        (rtc-log-and-state/rtc-log :rtc.log/upload {:sub-type :request-upload-graph
                                                    :message "requesting upload-graph"})
        (let [upload-resp
              (m/? (ws-util/send&recv get-ws-create-task {:action "upload-graph"
                                                          :s3-key key
                                                          :schema-version (str major-schema-version)
                                                          :graph-name remote-graph-name
                                                          :encrypted-aes-key
                                                          (ldb/write-transit-str encrypted-aes-key)}))]
          (if-let [graph-uuid (:graph-uuid upload-resp)]
            (let [schema-version (ldb/get-graph-schema-version @conn)]
              (ldb/transact! conn
                             [(ldb/kv :logseq.kv/graph-uuid graph-uuid)
                              (ldb/kv :logseq.kv/graph-local-tx "0")
                              (ldb/kv :logseq.kv/remote-schema-version schema-version)])
              (client-op/update-graph-uuid repo graph-uuid)
              (client-op/remove-local-tx repo)
              (client-op/update-local-tx repo 1)
              (client-op/add-all-exists-asset-as-ops repo)
              (c.m/<? (worker-db-metadata/<store repo (pr-str {:kv/value graph-uuid})))
              (m/? (rtc-crypt/task--persist-graph-encrypted-aes-key graph-uuid encrypted-aes-key))
              (rtc-log-and-state/rtc-log :rtc.log/upload {:sub-type :upload-completed
                                                          :message "upload-graph completed"})
              {:graph-uuid graph-uuid})
            (throw (ex-info "upload-graph failed" {:upload-resp upload-resp}))))))))
(defn- fill-block-fields
  "Assign :block/page to every non-page block by walking :block/parent
  links up to the nearest ancestor that has :block/name (i.e. a page).
  Ancestor lookups are cached per block to avoid re-walking shared chains."
  [blocks]
  (let [id->block (into {} (map (juxt :db/id identity) blocks))
        *block->parent-block-cache (atom {})]
    (letfn [(page-of-block-2 [block]
              (or
               (@*block->parent-block-cache block)
               (when-let [parent-id (:block/parent block)]
                 (when-let [parent (id->block parent-id)]
                   (if (:block/name parent)
                     (do (swap! *block->parent-block-cache assoc block parent)
                         parent)
                     (page-of-block-2 parent))))))]
      (let [groups (group-by #(boolean (:block/name %)) blocks)
            other-blocks (set (get groups false))
            block-id->page-id (into {} (map (fn [b] [(:db/id b) (:db/id (page-of-block-2 b))]) other-blocks))]
        (mapv (fn [b]
                (if-let [page-id (block-id->page-id (:db/id b))]
                  (assoc b :block/page page-id)
                  b))
              blocks)))))
(defn- blocks->card-one-attrs
  "Set of :db/idents of blocks declared :db.cardinality/one."
  [blocks]
  (into #{}
        (keep (fn [{:db/keys [ident cardinality]}]
                (when (and ident (= :db.cardinality/one cardinality))
                  ident)))
        blocks))
(defn- convert-card-one-value-from-value-coll
  "Remote sends card-one values wrapped in a coll; unwrap each such attr
  present in `block` to its first element. Non-coll values stay as-is."
  [card-one-attrs block]
  (reduce
   (fn [b k]
     (let [v (get b k)]
       (if (or (sequential? v) (set? v))
         (assoc b k (first v))
         b)))
   block
   (filter #(contains? card-one-attrs %) (keys block))))
(defn- transact-remote-schema-version!
  "Copy the graph's :logseq.kv/schema-version value into
  :logseq.kv/remote-schema-version (without generating rtc/undo ops)."
  [repo]
  (when-let [conn (worker-state/get-datascript-conn repo)]
    (let [db @conn]
      (when-let [schema-version (:kv/value (d/entity db :logseq.kv/schema-version))]
        (ldb/transact! conn
                       [(ldb/kv :logseq.kv/remote-schema-version schema-version)]
                       {:rtc-download-graph? true
                        :gen-undo-ops? false
                        :persist-op? false})))))
(defn- <transact-block-refs!
  "Rebuild :block/refs for every block after a graph download and
  transact them in chunks of 500, with small delays between chunks to
  keep the worker responsive. Progress is reported via rtc-log."
  [repo graph-uuid]
  (when-let [conn (worker-state/get-datascript-conn repo)]
    (let [db @conn
          ;; get all the block datoms
          datoms (d/datoms db :avet :block/uuid)
          page-or-object?-memoized (memoize outliner-pipeline/page-or-object?-helper)
          refs-tx (mapcat
                   (fn [d]
                     (let [block (d/entity db (:e d))
                           refs (outliner-pipeline/db-rebuild-block-refs
                                 db block :page-or-object?-memoized page-or-object?-memoized)]
                       (map
                        (fn [ref]
                          [:db/add (:db/id block) :block/refs ref])
                        refs)))
                   datoms)]
      (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transact-graph-data-to-db-5
                                                    :message (str "transacting block-refs(" (count refs-tx) ")")
                                                    :graph-uuid graph-uuid})
      (p/doseq [refs-tx* (partition-all 500 refs-tx)]
        (ldb/transact! conn refs-tx* {:outliner-op :rtc-download-rebuild-block-refs
                                      :rtc-download-graph? true})
        (p/delay 10)))))
(defn- block->schema-map
  "Extract the datascript-schema attrs (:db/valueType, :db/cardinality,
  :db/index) of a block that has a :db/ident; nil when the block has no
  ident or carries none of those attrs."
  [block]
  (let [{:db/keys [ident valueType cardinality index]} block]
    (when (and ident (or valueType cardinality index))
      (cond-> {:db/ident ident}
        valueType (assoc :db/valueType valueType)
        cardinality (assoc :db/cardinality cardinality)
        index (assoc :db/index index)))))
(defn- blocks->schema-blocks+normal-blocks
  "Split `blocks` into [schema-blocks normal-blocks]: schema attrs
  (:db/valueType, :db/cardinality, :db/index) are pulled out into their
  own maps and stripped from the corresponding normal block."
  [blocks]
  (loop [bs blocks
         schema-blocks []
         normal-blocks []]
    (if-let [[block & more] (seq bs)]
      (if-let [schema-block (block->schema-map block)]
        (recur more
               (conj schema-blocks schema-block)
               (conj normal-blocks (dissoc block :db/valueType :db/cardinality :db/index)))
        (recur more schema-blocks (conj normal-blocks block)))
      [schema-blocks normal-blocks])))
(defn- blocks-resolve-temp-id
  "Rewrite remote blocks so every temp-id (stringified db/id) reference
  becomes resolvable: a :db/ident when the target has one, otherwise a
  [:block/uuid uuid] lookup ref. Emits a first batch of
  {:block/uuid .. (:db/ident ..)} stubs so those lookups resolve, then
  the blocks themselves with :db/id dropped. Which attrs are refs is
  derived from `schema-blocks`."
  [schema-blocks blocks]
  (let [uuids (map :block/uuid blocks)
        idents (map :db/ident blocks)
        ids (map :db/id blocks)
        id->uuid (zipmap ids uuids)
        id->ident (zipmap ids idents)
        id-tx-data (map (fn [id]
                          (let [uuid' (id->uuid id)
                                ident (id->ident id)]
                            (cond-> {:block/uuid uuid'}
                              ident
                              (assoc :db/ident ident)))) ids)
        id-ref-exists? (fn [v] (and (string? v) (or (get id->ident v) (get id->uuid v))))
        ref-k-set (set (keep (fn [b] (when (= :db.type/ref (:db/valueType b))
                                       (:db/ident b)))
                             schema-blocks))
        ref-k? (fn [k] (contains? ref-k-set k))
        blocks-tx-data (map (fn [block]
                              (->> (map
                                    (fn [[k v]]
                                      (let [v
                                            (if (ref-k? k)
                                              (cond
                                                (id-ref-exists? v)
                                                (or (get id->ident v) [:block/uuid (get id->uuid v)])
                                                (and (sequential? v) (every? id-ref-exists? v))
                                                (map (fn [id] (or (get id->ident id) [:block/uuid (get id->uuid id)])) v)
                                                :else
                                                v)
                                              v)]
                                        [k v]))
                                    (dissoc block :db/id))
                                   (into {}))) blocks)]
    (concat id-tx-data blocks-tx-data)))
(defn- remote-all-blocks=>client-blocks
  "Convert the remote export payload into client-format block maps:
  unwrap card-one value colls, run the malli coercer (decode transit and
  stringify ref ids), drop ignored attrs/entities, then fill :block/page.
  Throws (after logging humanized errors) on coercion failure."
  [all-blocks ignore-attr-set ignore-entity-set]
  (let [{:keys [_ _t blocks]} all-blocks
        card-one-attrs (blocks->card-one-attrs blocks)
        blocks1 (worker-util/profile :convert-card-one-value-from-value-coll
                                     (map (partial convert-card-one-value-from-value-coll card-one-attrs) blocks))
        blocks2 (try
                  (worker-util/profile :normalize-remote-blocks
                                       (normalized-remote-blocks-coercer blocks1))
                  (catch :default e
                    (log/error :rtc-malli-coerce-failed e)
                    (prn :debug :coerce-errors
                         (me/humanize (get-in (ex-data e) [:data :explain]))
                         :data (:data (ex-data e)))
                    (throw e)))
        blocks (sequence
                (comp
                 ;;TODO: remove this
                 ;;client/schema already converted to :db/cardinality, :db/valueType by remote,
                 ;;and :client/schema should be removed by remote too
                 (map #(dissoc % :client/schema))
                 (remove (fn [block] (contains? ignore-entity-set (:db/ident block))))
                 (map (fn [block]
                        (into {} (remove (comp (partial contains? ignore-attr-set) first)) block))))
                blocks2)
        blocks (fill-block-fields blocks)]
    blocks))
(defn- remote-all-blocks->tx-data+t
  "Return
  {:remote-t ...
   :init-tx-data ...
   :tx-data ...}
  init-tx-data - schema data and other init-data, need to be transacted first
  tx-data - all other data"
  [remote-all-blocks graph-uuid]
  (let [t (:t remote-all-blocks)
        blocks (remote-all-blocks=>client-blocks
                remote-all-blocks
                rtc-const/ignore-attrs-when-init-download
                rtc-const/ignore-entities-when-init-download)
        ;; schema attrs must be installed before data that uses them
        [schema-blocks normal-blocks] (blocks->schema-blocks+normal-blocks blocks)
        tx-data (concat
                 (blocks-resolve-temp-id schema-blocks normal-blocks)
                 [(ldb/kv :logseq.kv/graph-uuid graph-uuid)])
        init-tx-data (cons (ldb/kv :logseq.kv/db-type "db") schema-blocks)]
    {:remote-t t
     :init-tx-data init-tx-data
     :tx-data tx-data}))
(defn- task--decrypt-blocks-aux
  "Sequentially decrypt `encrypt-attr-set` attrs of each block with
  `aes-key`; returns the vector of decrypted blocks."
  [aes-key encrypt-attr-set blocks]
  (m/sp
    (loop [[block & rest-blocks] blocks
           result []]
      (if-not block
        result
        (let [block* (c.m/<? (crypt/<decrypt-map aes-key encrypt-attr-set block))]
          (recur rest-blocks (conj result block*)))))))
(defn- task--decrypt-blocks
  "Decrypt downloaded blocks with the graph's AES key (resolved via the
  current user's id-token). When no AES key exists the blocks are
  returned as-is (unencrypted graph)."
  [graph-uuid blocks]
  (m/sp
    (let [token (worker-state/get-id-token)
          user-uuid (:sub (worker-util/parse-jwt token))
          _ (assert (and token user-uuid))
          {:keys [get-ws-create-task]}
          (ws-util/gen-get-ws-create-map--memoized (ws-util/get-ws-url token))
          aes-key (m/? (rtc-crypt/task--get-aes-key get-ws-create-task user-uuid graph-uuid))]
      (if aes-key
        (m/? (task--decrypt-blocks-aux aes-key rtc-const/encrypt-attr-set blocks))
        blocks))))
(defn- new-task--transact-remote-all-blocks!
  "Create/open the local db for `repo` and transact the downloaded graph
  into it: init/schema data first, then the rest in 500-item chunks, then
  the remote schema version and rebuilt block refs. Local/remote t
  bookkeeping is reset to the download's `remote-t`. Finally notifies
  clients about the new repo."
  [all-blocks repo graph-uuid]
  (let [{:keys [remote-t init-tx-data tx-data]}
        (remote-all-blocks->tx-data+t all-blocks graph-uuid)]
    (m/sp
      (rtc-log-and-state/clean-cached-graph-local-and-remote-t graph-uuid)
      (rtc-log-and-state/update-local-t graph-uuid remote-t)
      (rtc-log-and-state/update-remote-t graph-uuid remote-t)
      (c.m/<?
       (p/do!
        ((@thread-api/*thread-apis :thread-api/create-or-open-db) repo {:close-other-db? false})
        (client-op/update-local-tx repo remote-t)
        ((@thread-api/*thread-apis :thread-api/export-db) repo)
        (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transact-graph-data-to-db-1
                                                      :message (str "transacting init data(" (count init-tx-data) ")")
                                                      :graph-uuid graph-uuid})
        ((@thread-api/*thread-apis :thread-api/transact)
         repo init-tx-data
         {:rtc-download-graph? true
          :gen-undo-ops? false
          :persist-op? false}
         (worker-state/get-context))
        (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transact-graph-data-to-db-2
                                                      :message (str "transacting other data(" (count tx-data) ")")
                                                      :graph-uuid graph-uuid})
        ;; chunked transacting with small delays keeps the worker responsive
        (p/doseq [tx-data* (partition-all 500 tx-data)]
          ((@thread-api/*thread-apis :thread-api/transact)
           repo tx-data* {:rtc-download-graph? true
                          :gen-undo-ops? false
                          :persist-op? false} (worker-state/get-context))
          (p/delay 10))
        (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transact-graph-data-to-db-3
                                                      :message "transacting remote schema version"
                                                      :graph-uuid graph-uuid})
        (transact-remote-schema-version! repo)
        (<transact-block-refs! repo graph-uuid)))
      (shared-service/broadcast-to-clients! :add-repo {:repo repo}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;
;; async download-graph ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn new-task--request-download-graph
  "Task: ask the server to prepare a graph snapshot for download.
  Resolves to the :download-info-uuid of the server response."
  [get-ws-create-task graph-uuid schema-version]
  (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :request-download-graph
                                                :message "requesting download graph"
                                                :graph-uuid graph-uuid
                                                :schema-version schema-version})
  (m/join :download-info-uuid
          (ws-util/send&recv get-ws-create-task {:action "download-graph"
                                                :graph-uuid graph-uuid
                                                :schema-version (str schema-version)})))
(comment
(defn new-task--download-info-list
[get-ws-create-task graph-uuid schema-version]
(m/join :download-info-list
(ws-util/send&recv get-ws-create-task {:action "download-info-list"
:graph-uuid graph-uuid
:schema-version (str schema-version)}))))
(defn new-task--wait-download-info-ready
  "Task: poll \"download-info-list\" every 3s until the entry matching
  `download-info-uuid` carries a :download-info-s3-url (snapshot ready).
  Resolves to that download-info, or :timeout after `timeout-ms`."
  [get-ws-create-task download-info-uuid graph-uuid schema-version timeout-ms]
  (->
   (m/sp
     (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :wait-remote-graph-data-ready
                                                   :message "waiting for the remote to prepare the data"
                                                   :graph-uuid graph-uuid})
     (loop []
       (m/? (m/sleep 3000))
       (let [{:keys [download-info-list]}
             (m/? (ws-util/send&recv get-ws-create-task {:action "download-info-list"
                                                         :graph-uuid graph-uuid
                                                         :schema-version (str schema-version)}))]
         (if-let [found-download-info
                  (some
                   (fn [download-info]
                     (when (and (= download-info-uuid (:download-info-uuid download-info))
                                (:download-info-s3-url download-info))
                       download-info))
                   download-info-list)]
           found-download-info
           (recur)))))
   (m/timeout timeout-ms :timeout)))
(defn new-task--download-graph-from-s3
  "Task: fetch the graph snapshot from `s3-url`, decrypt it when the graph is
  e2ee (marked by a :logseq.kv/graph-rtc-e2ee? block), and transact it into a
  new local repo named after `graph-name`.
  Throws when the s3 request does not return HTTP 200."
  [graph-uuid graph-name s3-url]
  (let [graph-uuid (if (string? graph-uuid) (parse-uuid graph-uuid) graph-uuid)]
    (m/sp
      (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :downloading-graph-data
                                                    :message "downloading graph data"
                                                    :graph-uuid graph-uuid})
      (let [{:keys [status body] :as r} (m/? (http/get s3-url {:with-credentials? false}))
            repo (str sqlite-util/db-version-prefix graph-name)]
        (if (not= 200 status)
          (throw (ex-info "download-graph from s3 failed" {:resp r}))
          (do
            (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transact-graph-data-to-db
                                                          :message "transacting graph data to local db"
                                                          :graph-uuid graph-uuid})
            (let [all-blocks (ldb/read-transit-str body)
                  blocks (:blocks all-blocks)
                  e2ee-graph? (boolean (some (fn [block] (= :logseq.kv/graph-rtc-e2ee? (:db/ident block))) blocks))
                  blocks* (if e2ee-graph? (m/? (task--decrypt-blocks graph-uuid blocks)) blocks)
                  all-blocks* (assoc all-blocks :blocks blocks*)]
              ;; NOTE(review): the downloading flag is not reset if the transact
              ;; below throws -- consider a try/finally.
              (worker-state/set-rtc-downloading-graph! true)
              (m/? (new-task--transact-remote-all-blocks! all-blocks* repo graph-uuid))
              (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :transacted-all-blocks
                                                            :message "transacted all blocks"
                                                            :graph-uuid graph-uuid})
              (client-op/update-graph-uuid repo graph-uuid)
              (c.m/<? (worker-db-metadata/<store repo (pr-str {:kv/value graph-uuid})))
              (worker-state/set-rtc-downloading-graph! false)
              (rtc-log-and-state/rtc-log :rtc.log/download {:sub-type :download-completed
                                                            :message "download completed"
                                                            :graph-uuid graph-uuid})
              nil)))))))
(defn new-task--branch-graph
  "Task: fork the current local graph `repo` as a new remote graph branch.
  Uploads all local blocks to a presigned s3 url, then requests
  \"branch-graph\" from that snapshot; on success records the new graph-uuid
  and resets local-tx state. Throws when the response has no :graph-uuid."
  [get-ws-create-task repo conn graph-uuid major-schema-version]
  (m/sp
    (rtc-log-and-state/rtc-log :rtc.log/branch-graph {:sub-type :fetching-presigned-put-url
                                                      :message "fetching presigned put-url"})
    (rtc-db/remove-rtc-data-in-conn! repo)
    ;; fetch the presigned url and serialize the local graph concurrently
    (let [[{:keys [url key]} all-blocks-str]
          (m/?
           (m/join
            vector
            (ws-util/send&recv get-ws-create-task {:action "presign-put-temp-s3-obj"})
            (m/sp
              (let [all-blocks (export-as-blocks
                                @conn
                                :ignore-attr-set rtc-const/ignore-attrs-when-init-upload
                                :ignore-entity-set rtc-const/ignore-entities-when-init-upload)]
                (ldb/write-transit-str all-blocks)))))]
      (rtc-log-and-state/rtc-log :rtc.log/branch-graph {:sub-type :upload-data
                                                        :message "uploading data"})
      (m/? (http/put url {:body all-blocks-str :with-credentials? false}))
      (rtc-log-and-state/rtc-log :rtc.log/branch-graph {:sub-type :request-branch-graph
                                                        :message "requesting branch-graph"})
      (let [resp (m/? (ws-util/send&recv get-ws-create-task {:action "branch-graph"
                                                             :s3-key key
                                                             :schema-version (str major-schema-version)
                                                             :graph-uuid graph-uuid}))]
        (if-let [graph-uuid (:graph-uuid resp)]
          (let [schema-version (ldb/get-graph-schema-version @conn)]
            (ldb/transact! conn
                           [(ldb/kv :logseq.kv/graph-uuid graph-uuid)
                            (ldb/kv :logseq.kv/graph-local-tx "0")
                            (ldb/kv :logseq.kv/remote-schema-version schema-version)])
            (client-op/update-graph-uuid repo graph-uuid)
            (client-op/remove-local-tx repo)
            (client-op/add-all-exists-asset-as-ops repo)
            (c.m/<? (worker-db-metadata/<store repo (pr-str {:kv/value graph-uuid})))
            (rtc-log-and-state/rtc-log :rtc.log/branch-graph {:sub-type :completed
                                                              :message "branch-graph completed"})
            nil)
          (throw (ex-info "branch-graph failed" {:upload-resp resp})))))))
(comment
(do
(def repo "logseq_db_test-transact-huge-graph")
(def debug-transit (shadow.resource/inline "debug2.transit"))
(def all-blocks (ldb/read-transit-str debug-transit)))
(let [{:keys [remote-t init-tx-data tx-data]}
(time (remote-all-blocks->tx-data+t all-blocks "36203c0d-c861-4ce0-a6ba-e355e7750989"))]
(def init-tx-data init-tx-data)
(def tx-data tx-data))
(p/do!
(prn :xxx1 (js/Date.))
((@thread-api/*thread-apis :thread-api/create-or-open-db) repo {:close-other-db? false})
(prn :xxx2 (js/Date.))
((@thread-api/*thread-apis :thread-api/transact)
repo init-tx-data
{:rtc-download-graph? true
:gen-undo-ops? false
:persist-op? false}
(worker-state/get-context))
(prn :xxx3 (js/Date.))
(p/doseq [tx-data* (partition-all 500 tx-data)]
((@thread-api/*thread-apis :thread-api/transact)
repo tx-data* {:rtc-download-graph? true
:gen-undo-ops? false
:persist-op? false} (worker-state/get-context))
(p/delay 10))
(prn :xxx4 (js/Date.))
(transact-remote-schema-version! repo)
(prn :xxx5 (js/Date.))
(<transact-block-refs! repo nil)
(prn :xxx6 (js/Date.)))
(p/do!
((@thread-api/*thread-apis :thread-api/unsafe-unlink-db) repo)))
(comment
(let [db @(frontend.worker.state/get-datascript-conn (frontend.worker.state/get-current-repo))
datoms (d/datoms db :avet :block/uuid)
page-or-object?-memoized (memoize outliner-pipeline/page-or-object?-helper)
refs-tx
(time
(vec
(mapcat
(fn [d]
(let [block (d/entity db (:e d))
refs (outliner-pipeline/db-rebuild-block-refs
db block :page-or-object?-memoized page-or-object?-memoized)]
(map
(fn [ref]
[:db/add (:db/id block) :block/refs ref])
refs)))
datoms)))]
(prn ::count (count refs-tx))
;; (prn ::take-20 (take 20 (sort-by second > (into [] (frequencies (map last refs-tx))))))
))

View File

@@ -1,240 +0,0 @@
(ns frontend.worker.rtc.gen-client-op
"Generate client-ops from entities/datoms"
(:require [clojure.string :as string]
[datascript.core :as d]
[frontend.worker.rtc.const :as rtc-const]
[logseq.db :as ldb]
[logseq.db.frontend.property :as db-property]))
(defn remove-conflict-same-block-datoms
  "remove conflict entity-datoms for same-block(same block/uuid) in same-entity-datoms-coll.
  When several entities in one tx share the same :block/uuid at the same t,
  keep a single winner (prefer datoms with added?=true, then the larger eid)
  and drop the datoms of the losing entities."
  [same-entity-datoms-coll]
  (let [entity-info (map (fn [datoms]
                           ;; datom layout: [e a v t added?]
                           (let [first-datom (first datoms)
                                 e (nth first-datom 0)
                                 t (nth first-datom 3)
                                 uuid-datom (some (fn [d]
                                                    (when (keyword-identical? :block/uuid (nth d 1))
                                                      d))
                                                  datoms)
                                 uuid (when uuid-datom (nth uuid-datom 2))
                                 added? (when uuid-datom (nth uuid-datom 4))]
                             {:e e :t t :uuid uuid :added? added? :datoms datoms}))
                         same-entity-datoms-coll)
        uuid-groups (group-by :uuid (filter :uuid entity-info))
        loser-eids (reduce
                    (fn [acc [_uuid infos]]
                      (let [t-groups (group-by :t infos)]
                        (reduce
                         (fn [acc* [_t infos*]]
                           (if (> (count infos*) 1)
                             ;; sort descending by [added? e]; head wins, rest lose
                             (let [sorted-infos (sort-by (fn [x] [(if (:added? x) 1 0) (:e x)])
                                                         (fn [a b] (compare b a))
                                                         infos*)
                                   losers (rest sorted-infos)]
                               (into acc* (map :e losers)))
                             acc*))
                         acc
                         t-groups)))
                    #{}
                    uuid-groups)]
    (if (seq loser-eids)
      (map :datoms (remove #(contains? loser-eids (:e %)) entity-info))
      same-entity-datoms-coll)))
(defn group-datoms-by-entity
  "Groups transaction datoms by entity.
  Returns {:same-entity-datoms-coll <groups in first-seen eid order, conflicts removed>
           :id->same-entity-datoms  <eid -> its datom vectors>}."
  [tx-data]
  (let [datoms (map vec tx-data)
        eid->datoms (group-by first datoms)
        eid-order (distinct (map first datoms))
        grouped (remove-conflict-same-block-datoms (map eid->datoms eid-order))]
    {:same-entity-datoms-coll grouped
     :id->same-entity-datoms eid->datoms}))
(defn- latest-add?->v->t
  "Reduce `add?->v->t` ({true {v t ...} false {v t ...}}) to only the newest
  [v t] entry per polarity; on a t tie between add and retract, keep both."
  [add?->v->t]
  (let [pick-latest (fn [polarity]
                      (->> (get add?->v->t polarity)
                           seq
                           (sort-by second >)
                           first))
        newest-add (pick-latest true)
        newest-retract (pick-latest false)]
    (cond
      (nil? newest-add) {false (conj {} newest-retract)}
      (nil? newest-retract) {true (conj {} newest-add)}
      :else
      (let [t-add (second newest-add)
            t-retract (second newest-retract)]
        (cond
          (= t-add t-retract) {true (conj {} newest-add)
                               false (conj {} newest-retract)}
          (> t-add t-retract) {true (conj {} newest-add)}
          :else {false (conj {} newest-retract)})))))
;; exact attrs whose datoms are turned into rtc client-ops
(def ^:private watched-attrs
  #{:block/title :block/created-at :block/updated-at :block/alias
    :block/tags :block/link :block/journal-day
    :logseq.property/classes :logseq.property/value
    :db/index :db/valueType :db/cardinality})
;; attr namespaces that are always watched (logseq property namespaces + "logseq.class")
(def ^:private watched-attr-ns
  (conj db-property/logseq-property-namespaces "logseq.class"))
(defn- watched-attr?
  "True when changes to `attr` should be synced to the remote.
  NOTE(review): (namespace attr) is nil for unqualified attrs outside
  watched-attrs; string/ends-with? would then throw -- presumably every
  synced attr is qualified."
  [attr]
  (if (contains? watched-attrs attr)
    true
    (let [attr-ns (namespace attr)]
      (or (contains? watched-attr-ns attr-ns)
          (string/ends-with? attr-ns ".property")
          (string/ends-with? attr-ns ".class")))))
(defn- ref-attr?
  "True when `attr` is declared :db.type/ref in the db schema."
  [db attr]
  (-> (d/schema db)
      (get-in [attr :db/valueType])
      (= :db.type/ref)))
(defn- update-op-av-coll
  "Flatten `a->add?->v->t` into a coll of [a v t add?] tuples.
  Ref-attr values are converted from eids to block-uuids (a retracted ref may
  only resolve in db-before); non-ref values are transit-encoded.
  Entries whose eid cannot be resolved to a block-uuid are dropped."
  [db-before db-after a->add?->v->t]
  (mapcat
   (fn [[a add?->v->t]]
     (mapcat
      (fn [[add? v->t]]
        (keep
         (fn [[v t]]
           (let [ref? (ref-attr? db-after a)]
             (case [add? ref?]
               [true true]
               (when-let [v-uuid (:block/uuid (d/entity db-after v))]
                 [a v-uuid t add?])
               [false true]
               (when-let [v-uuid (:block/uuid
                                  (or (d/entity db-after v)
                                      (d/entity db-before v)))]
                 [a v-uuid t add?])
               ([true false] [false false]) [a (ldb/write-transit-str v) t add?])))
         v->t))
      add?->v->t))
   a->add?->v->t))
(defn- redundant-update-op-av-coll?
  "True when every [a v t add?] in `av-coll` only touches :block/updated-at,
  i.e. the op carries no user-visible change."
  [av-coll]
  (every? (comp (partial keyword-identical? :block/updated-at) first) av-coll))
(defn- max-t
  "Max transaction t across the nested {a {add? {v t}}} map."
  [a->add?->v->t]
  (->> (vals a->add?->v->t)
       (mapcat vals)
       (mapcat vals)
       (apply max)))
(defn- get-first-vt
  "First [v t] entry under polarity `k` in `add?->v->t`, or nil."
  [add?->v->t k]
  (when-let [v->t (get add?->v->t k)]
    (first v->t)))
(defn- entity-datoms=>ops
  "Convert one entity's tx datoms into rtc client-ops.
  Emits, depending on what changed: :remove-page / :remove / :add (optionally
  preceded by :update-page) / :move / :update-page / :update; each op is
  tagged with a tx t. `ignore-attr-set` attrs are excluded from av-colls."
  [db-before db-after e->a->add?->v->t ignore-attr-set entity-datoms]
  (let [e (ffirst entity-datoms)
        entity (d/entity db-after e)
        {block-uuid :block/uuid} entity
        a->add?->v->t (e->a->add?->v->t e)
        {add?->block-name->t :block/name
         add?->block-title->t :block/title
         add?->block-uuid->t :block/uuid
         add?->block-parent->t :block/parent
         add?->block-order->t :block/order}
        a->add?->v->t
        [retract-block-uuid t1] (some-> add?->block-uuid->t (get false) first)
        [add-block-uuid t-uuid] (some-> add?->block-uuid->t (get true) first)
        [retract-block-name _] (some-> add?->block-name->t (get false) first)
        [add-block-name t2] (some-> add?->block-name->t latest-add?->v->t (get-first-vt true))
        [add-block-title t3] (some-> add?->block-title->t latest-add?->v->t (get-first-vt true))
        [add-block-parent t4] (some-> add?->block-parent->t latest-add?->v->t (get-first-vt true))
        [add-block-order t5] (some-> add?->block-order->t latest-add?->v->t (get-first-vt true))
        ;; keep only watched, non-ignored attrs for the op payloads
        a->add?->v->t* (into {}
                             (filter
                              (fn [[a _]]
                                (and (watched-attr? a)
                                     (not (contains? ignore-attr-set a)))))
                             a->add?->v->t)]
    (cond
      ;; uuid + name retracted => whole page removed
      (and retract-block-uuid retract-block-name)
      [[:remove-page t1 {:block-uuid retract-block-uuid}]]
      retract-block-uuid
      [[:remove t1 {:block-uuid retract-block-uuid}]]
      ;; uuid added => new block (pages also get an :update-page op)
      add-block-uuid
      (let [av-coll (update-op-av-coll db-before db-after a->add?->v->t*)
            add-op [:add (or t-uuid t4 t5) {:block-uuid block-uuid :av-coll av-coll}]]
        (if (or add-block-name
                (and (ldb/page? entity) add-block-title))
          [[:update-page (or t2 t3) {:block-uuid block-uuid}] add-op]
          [add-op]))
      :else
      ;; existing block: emit :move / :update-page / :update as applicable;
      ;; skip :update when it would only touch :block/updated-at
      (let [ops (cond-> []
                  (or add-block-parent add-block-order)
                  (conj [:move (or t4 t5) {:block-uuid block-uuid}])
                  (or add-block-name
                      (and (ldb/page? entity) add-block-title))
                  (conj [:update-page (or t2 t3) {:block-uuid block-uuid}]))
            update-op (when-let [av-coll (not-empty (update-op-av-coll db-before db-after a->add?->v->t*))]
                        (when-not (redundant-update-op-av-coll? av-coll)
                          (let [t (max-t a->add?->v->t*)]
                            [:update t {:block-uuid block-uuid :av-coll av-coll}])))]
        (cond-> ops update-op (conj update-op))))))
(defn entity-datoms=>a->add?->v->t
  "Index entity datoms [e a v t add?] into a nested map {a {add? {v t}}}."
  [entity-datoms]
  (reduce
   (fn [acc [_e a v t add?]]
     (assoc-in acc [a add? v] t))
   {}
   entity-datoms))
(defn generate-rtc-ops
  "Generate rtc client-ops for every entity-datoms group in
  `same-entity-datoms-coll`, skipping attrs in rtc-const/ignore-attrs-when-syncing."
  [db-before db-after same-entity-datoms-coll e->a->add?->v->t]
  (mapcat
   (partial entity-datoms=>ops
            db-before db-after e->a->add?->v->t rtc-const/ignore-attrs-when-syncing)
   same-entity-datoms-coll))
(defn- generate-rtc-ops-from-entities
  "Generate rtc ops from the current datoms of `ents` (all from the same db).
  Passes the same db as both db-before and db-after, so the entities' current
  state is emitted as additions."
  [ents]
  (when (seq ents)
    (let [db (d/entity-db (first ents))
          id->same-entity-datoms
          (into {}
                (map (fn [ent]
                       (let [e (:db/id ent)
                             datoms (d/datoms db :eavt e)]
                         [e datoms])))
                ents)
          e->a->add?->v->t (update-vals id->same-entity-datoms entity-datoms=>a->add?->v->t)]
      (generate-rtc-ops db db (vals id->same-entity-datoms) e->a->add?->v->t))))
(defn generate-rtc-ops-from-entities+parents
  "generate ents and their parents as rtc-ops"
  [ents]
  (let [ancestors (fn [ent]
                    ;; cap at 20 levels to guard against unexpectedly deep parent chains
                    (take 20 (take-while some? (iterate :block/parent ent))))
        ents* (into #{} (mapcat ancestors) ents)]
    (generate-rtc-ops-from-entities ents*)))
(defn generate-rtc-ops-from-property-entities
  "Generate rtc ops for `property-ents`; every entity must be a property."
  [property-ents]
  (when-let [ents (seq property-ents)]
    (assert (every? ldb/property? ents))
    (generate-rtc-ops-from-entities ents)))
(defn generate-rtc-ops-from-class-entities
  "Generate rtc ops for `class-ents`; every entity must be a class."
  [class-ents]
  (when-let [ents (seq class-ents)]
    (assert (every? ldb/class? ents))
    (generate-rtc-ops-from-entities ents)))
(defn generate-rtc-rename-db-ident-ops
  "Build one [:rename-db-ident 0 {...}] client-op per entry of
  `rename-db-idents`. Each entry must have a keyword-or-uuid
  :db-ident-or-block-uuid and a keyword :new-db-ident."
  [rename-db-idents]
  (let [valid-rename? (fn [{:keys [db-ident-or-block-uuid new-db-ident]}]
                        (and (or (keyword? db-ident-or-block-uuid)
                                 (uuid? db-ident-or-block-uuid))
                             (keyword? new-db-ident)))]
    (assert (every? valid-rename? rename-db-idents) rename-db-idents)
    (for [{:keys [db-ident-or-block-uuid new-db-ident]} rename-db-idents]
      [:rename-db-ident 0 {:db-ident-or-block-uuid db-ident-or-block-uuid
                           :new-db-ident new-db-ident}])))

View File

@@ -1,28 +0,0 @@
(ns frontend.worker.rtc.hash
"Calculate the hash of the blocks to compare whether
these blocks are consistent with the remote.")
(defn- block-entity->str
  "Convert block-entity to a string (concat of values selected by :selected-keys).
  'selected-keys': it's an ordered coll; each element is a key or a key path.
  Returns nil when any selected value is missing, so callers using `keep` can
  drop incomplete entities. (Previously the `reduced nil` was threaded into
  `(apply str nil)`, which yielded \"\" instead of nil and leaked an empty
  string into the hash set.)"
  [selected-keys block-entity]
  (let [vs (reduce
            (fn [r k-or-ks]
              (let [v (if (sequential? k-or-ks)
                        (get-in block-entity k-or-ks)
                        (get block-entity k-or-ks))]
                (if (nil? v)
                  (reduced nil)         ; missing value: the whole entity is unusable
                  (conj r v))))
            []
            selected-keys)]
    (when (some? vs)
      (apply str vs))))
(defn hash-blocks
  "Hash `block-entities` for consistency comparison with the remote.
  Returns {:hash <int> :count <int>} computed over the set of distinct
  per-block strings built from `selected-keys`."
  [block-entities & {:keys [selected-keys]
                     :or {selected-keys [:block/uuid
                                         [:block/left :block/uuid]
                                         [:block/parent :block/uuid]]}}]
  (let [block-strs (into #{}
                         (keep #(block-entity->str selected-keys %))
                         block-entities)]
    {:hash (hash block-strs)
     :count (count block-strs)}))

View File

@@ -1,121 +0,0 @@
(ns frontend.worker.rtc.log-and-state
"Fns to generate rtc related logs"
(:require [frontend.common.missionary :as c.m]
[frontend.worker.shared-service :as shared-service]
[lambdaisland.glogi :as log]
[logseq.common.defkeywords :refer [defkeywords]]
[malli.core :as ma]
[missionary.core :as m]))
;; most recent rtc log entry; watched and broadcast by subscribe-logs below
(def ^:private *rtc-log (atom nil))
;; Malli :enum schema of all allowed rtc log :type keywords, built from the
;; defkeywords registrations (take-nth 2 keeps the keywords, drops the doc maps).
(def ^:private rtc-log-type-schema
  (vec
   (concat
    [:enum]
    (take-nth
     2
     (defkeywords
       :rtc.log/upload {:doc "rtc log type for upload-graph."}
       ;; fixed copy-paste typo: this doc previously said "upload-graph"
       :rtc.log/download {:doc "rtc log type for download-graph."}
       :rtc.log/cancelled {:doc "rtc has been cancelled"}
       :rtc.log/apply-remote-update {:doc "apply remote updates to local graph"}
       :rtc.log/pull-remote-data {:doc "pull remote updates"}
       :rtc.log/push-local-update {:doc "push local updates to remote graph"}
       :rtc.log/higher-remote-schema-version-exists {:doc "remote-graph with larger schema-version exists"}
       :rtc.log/branch-graph {:doc "rtc log type for creating a new graph branch"}
       :rtc.asset.log/cancelled {:doc "rtc asset sync has been cancelled"}
       :rtc.asset.log/upload-assets {:doc "upload local assets to remote"}
       :rtc.asset.log/download-assets {:doc "download assets from remote"}
       :rtc.asset.log/remove-assets {:doc "remove remote assets"}
       :rtc.asset.log/asset-too-large {:doc "asset is too large to upload"}
       :rtc.asset.log/initial-download-missing-assets {:doc "download assets if not exists in rtc-asset-sync initial phase"})))))
;; validator fn for rtc log :type keywords
(def ^:private rtc-log-type-validator (ma/validator rtc-log-type-schema))
(defn rtc-log
  "Publish an rtc log entry: `m` plus :type `type` and :created-at now.
  `type` must be one of rtc-log-type-schema. Always returns nil."
  [type m]
  {:pre [(map? m) (rtc-log-type-validator type)]}
  (reset! *rtc-log (assoc m :type type :created-at (js/Date.)))
  nil)
;;; some other states
;; {graph-uuid t}: last known transaction number per graph
(def ^:private graph-uuid->t-schema
  [:map-of :uuid :int])
;; atom validator: logs the offending value before rejecting it
(def ^:private graph-uuid->t-validator (let [validator (ma/validator graph-uuid->t-schema)]
                                         (fn [v]
                                           (if (validator v)
                                             true
                                             (do (log/error :debug-graph-uuid->t-validator v)
                                                 false)))))
;; graph-uuid -> last t transacted locally / last t seen from the remote
(def *graph-uuid->local-t (atom {} :validator graph-uuid->t-validator))
(def *graph-uuid->remote-t (atom {} :validator graph-uuid->t-validator))
(defn- ensure-uuid
  "Coerce `v` (uuid or uuid-string) to a uuid; throw on any other value."
  [v]
  (if (uuid? v)
    v
    (if (string? v)
      (uuid v)
      (throw (ex-info "illegal value" {:data v})))))
(defn- create-t-flow
  "Build a continuous flow of t values for `graph-uuid` from the watched atom
  `*graph-uuid->t`. (Extracted: the local/remote variants were duplicated.)"
  [*graph-uuid->t graph-uuid]
  (->> (m/watch *graph-uuid->t)
       (m/eduction (keep (fn [m] (get m (ensure-uuid graph-uuid)))))
       c.m/continue-flow))

(defn- create-local-t-flow
  "Flow of local-t values for `graph-uuid`."
  [graph-uuid]
  (create-t-flow *graph-uuid->local-t graph-uuid))

(defn- create-remote-t-flow
  "Flow of remote-t values for `graph-uuid`."
  [graph-uuid]
  (create-t-flow *graph-uuid->remote-t graph-uuid))
(defn create-local&remote-t-flow
  "ensure local-t <= remote-t
  Returns a flow of [local-t remote-t] pairs, dropping states where local-t
  would be ahead of remote-t."
  [graph-uuid]
  (assert (some? graph-uuid))
  (->> (m/latest vector (create-local-t-flow graph-uuid) (create-remote-t-flow graph-uuid))
       (m/eduction (filter (fn [[local-t remote-t]] (>= remote-t local-t))))))
(defn clean-cached-graph-local-and-remote-t
  "Drop the cached local-t and remote-t entries for `graph-uuid`."
  [graph-uuid]
  (let [graph-uuid* (ensure-uuid graph-uuid)]
    (doseq [*graph-uuid->t [*graph-uuid->local-t *graph-uuid->remote-t]]
      (swap! *graph-uuid->t dissoc graph-uuid*))))
(defn update-local-t
  "Record the local t for `graph-uuid`.
  Invariant (checked only when both cached values exist):
  current-local-t <= local-t <= current-remote-t."
  [graph-uuid local-t]
  (let [graph-uuid (ensure-uuid graph-uuid)
        current-remote-t (get @*graph-uuid->remote-t graph-uuid)
        current-local-t (get @*graph-uuid->local-t graph-uuid)]
    (when (and current-remote-t current-local-t)
      (assert (and (>= local-t current-local-t) (<= local-t current-remote-t))
              {:local-t local-t
               :current-local-t current-local-t
               :current-remote-t current-remote-t}))
    (swap! *graph-uuid->local-t assoc graph-uuid local-t)))
(defn update-remote-t
  "Record the remote t for `graph-uuid`.
  Invariant (checked only when both cached values exist): remote-t never
  decreases and is never behind local-t."
  [graph-uuid remote-t]
  (let [graph-uuid (ensure-uuid graph-uuid)
        current-remote-t (get @*graph-uuid->remote-t graph-uuid)
        current-local-t (get @*graph-uuid->local-t graph-uuid)]
    (when (and current-remote-t current-local-t)
      (assert (and remote-t (>= remote-t current-remote-t) (>= remote-t current-local-t))
              {:remote-t remote-t
               :current-local-t current-local-t
               :current-remote-t current-remote-t}))
    (swap! *graph-uuid->remote-t assoc graph-uuid remote-t)))
;;; subscribe-logs, push to frontend
;;; TODO: refactor by using c.m/run-background-task
(defn- subscribe-logs
  "Watch *rtc-log and broadcast each new entry to all clients as :rtc-log.
  Re-registering is idempotent (removes any previous watch first)."
  []
  (remove-watch *rtc-log :subscribe-logs)
  (add-watch *rtc-log :subscribe-logs
             (fn [_ _ _ n] (when n (shared-service/broadcast-to-clients! :rtc-log n)))))
;; install the watch at namespace load time
(subscribe-logs)

View File

@@ -1,43 +0,0 @@
(ns frontend.worker.rtc.migrate
"migrate server data according to schema-version and client's migration-updates"
(:require [datascript.core :as d]
[frontend.worker.rtc.gen-client-op :as gen-client-op]))
;; (apply-conj coll xs) == (apply conj coll xs); used with swap! below
(def apply-conj (partial apply conj))
(defn migration-results=>client-ops
  "Convert a migration result into rtc client-ops.
  For each upgrade step, collects ops from renamed db-idents, from :fix
  tx-data, and from newly added properties/classes; finally appends an
  :update-kv-value op bumping :logseq.kv/schema-version to `to-version`
  at the max op t. Returns nil when `to-version` is absent."
  [{:keys [_from-version to-version upgrade-result-coll] :as _migration-result}]
  (when to-version
    (let [client-ops
          (mapcat
           (fn [{:keys [tx-data db-before db-after migrate-updates]}]
             (let [*tx-data (atom [])]
               (when-let [rename-db-idents (:rename-db-idents migrate-updates)]
                 (swap! *tx-data apply-conj
                        (gen-client-op/generate-rtc-rename-db-ident-ops rename-db-idents)))
               (when (:fix migrate-updates)
                 ;; replay the fix tx-data as regular rtc ops
                 (let [{:keys [same-entity-datoms-coll id->same-entity-datoms]}
                       (gen-client-op/group-datoms-by-entity tx-data)
                       e->a->add?->v->t
                       (update-vals
                        id->same-entity-datoms
                        gen-client-op/entity-datoms=>a->add?->v->t)]
                   (swap! *tx-data apply-conj
                          (gen-client-op/generate-rtc-ops
                           db-before db-after same-entity-datoms-coll e->a->add?->v->t))))
               (let [property-ks (seq (:properties migrate-updates))
                     class-ks (:classes migrate-updates)
                     d-entity-fn (partial d/entity db-after)
                     new-property-entities (keep d-entity-fn property-ks)
                     new-class-entities (keep d-entity-fn class-ks)]
                 (swap! *tx-data apply-conj
                        (concat (gen-client-op/generate-rtc-ops-from-property-entities new-property-entities)
                                (gen-client-op/generate-rtc-ops-from-class-entities new-class-entities))))
               @*tx-data))
           upgrade-result-coll)
          ;; op shape is [op-type t payload]; second = t
          max-t (apply max 0 (map second client-ops))]
      (conj (vec client-ops)
            [:update-kv-value
             max-t
             {:db-ident :logseq.kv/schema-version
              :value to-version}]))))

View File

@@ -1,656 +0,0 @@
(ns frontend.worker.rtc.remote-update
"Fns about applying remote updates"
(:require [clojure.data :as data]
[clojure.set :as set]
[clojure.string :as string]
[datascript.core :as d]
[frontend.common.crypt :as crypt]
[frontend.common.missionary :as c.m]
[frontend.worker-common.util :as worker-util]
[frontend.worker.handler.page :as worker-page]
[frontend.worker.rtc.asset :as r.asset]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.const :as rtc-const]
[frontend.worker.rtc.log-and-state :as rtc-log-and-state]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[logseq.clj-fractional-indexing :as index]
[logseq.common.util :as common-util]
[logseq.db :as ldb]
[logseq.db.frontend.property :as db-property]
[logseq.outliner.core :as outliner-core]
[logseq.outliner.transaction :as outliner-tx]
[missionary.core :as m]))
;; rtc db-transaction entrypoint; dispatches on the action keyword (e.g. :delete-blocks)
(defmulti ^:private transact-db! (fn [action & _args] action))
(defn- block-reuse-db-id
  "When `block`'s uuid matches a recently-deleted block, reuse that block's
  old :db/id and mark it with :block.temp/use-old-db-id?."
  [block]
  (let [old-eid (get @worker-state/*deleted-block-uuid->db-id (:block/uuid block))]
    (if old-eid
      (assoc block
             :db/id old-eid
             :block.temp/use-old-db-id? true)
      block)))
;; Delete blocks via outliner with rtc opts (no persist-op, no undo-ops).
(defmethod transact-db! :delete-blocks [_ & args]
  (outliner-tx/transact!
   {:persist-op? false
    :gen-undo-ops? false
    :outliner-op :delete-blocks
    :transact-opts {:conn (first args)}}
   (apply outliner-core/delete-blocks! args)))
;; Move blocks via outliner with rtc opts (no persist-op, no undo-ops).
(defmethod transact-db! :move-blocks [_ & args]
  (outliner-tx/transact!
   {:persist-op? false
    :gen-undo-ops? false
    :outliner-op :move-blocks
    :transact-opts {:conn (first args)}}
   (apply outliner-core/move-blocks! args)))
(defmethod transact-db! :update-block-order-directly [_ conn block-uuid block-parent-uuid block-order]
  ;; transact :block/parent and :block/order directly,
  ;; check :block/order has any conflicts with other blocks.
  ;; Scans the parent's children sorted by :block/order; if another sibling
  ;; already holds `block-order`, generates a fresh key between the conflicting
  ;; order and the next sibling's order.
  (let [parent-ent (when block-parent-uuid (d/entity @conn [:block/uuid block-parent-uuid]))
        sorted-order+block-uuid-coll (sort-by first (map (juxt :block/order :block/uuid) (:block/_parent parent-ent)))
        block-order*
        (if-let [[start-order end-order]
                 (reduce
                  (fn [[start-order] [current-order current-block-uuid]]
                    ;; FIX: once a conflicting start-order is found, the next
                    ;; sibling supplies end-order. Previously these `reduced`
                    ;; calls sat inside a discarded `(when ...)` statement, so
                    ;; a conflict was only resolved when the conflicting
                    ;; sibling happened to be the last child.
                    (if start-order
                      (if (= current-block-uuid block-uuid)
                        (reduced nil)
                        (reduced [start-order current-order]))
                      (let [compare-order (compare current-order block-order)]
                        (cond
                          (and (zero? compare-order)
                               (not= current-block-uuid block-uuid))
                          ;; found conflict order
                          [current-order nil]
                          (and (zero? compare-order)
                               (= current-block-uuid block-uuid))
                          ;; this block already has expected :block/order
                          (reduced nil)
                          (pos? compare-order) ;not found conflict order
                          (reduced nil)
                          (neg? compare-order)
                          nil))))
                  nil sorted-order+block-uuid-coll)]
          (index/generate-key-between start-order end-order)
          block-order)]
    (ldb/transact! conn [{:block/uuid block-uuid :block/order block-order*}]
                   {:rtc-op? true
                    :persist-op? false
                    :gen-undo-ops? false})
    ;; TODO: add ops when block-order* != block-order
    ))
;; Like :move-blocks but with persist-op? true, so the move is recorded
;; as a local op to be pushed to the remote.
(defmethod transact-db! :move-blocks&persist-op [_ & args]
  (outliner-tx/transact!
   {:persist-op? true
    :gen-undo-ops? false
    :outliner-op :move-blocks
    :transact-opts {:conn (first args)}}
   (apply outliner-core/move-blocks! args)))
;; Insert blocks under `target`, keeping the given :block/order and reusing
;; db-ids of just-deleted blocks (see block-reuse-db-id), then assert each
;; inserted block actually exists afterwards.
(defmethod transact-db! :insert-blocks [_ conn blocks target opts]
  (outliner-tx/transact!
   {:persist-op? false
    :gen-undo-ops? false
    :outliner-op :insert-blocks
    :transact-opts {:conn conn}}
   (let [opts' (assoc opts :keep-block-order? true)
         blocks' (map block-reuse-db-id blocks)]
     (outliner-core/insert-blocks! conn blocks' target opts')))
  ;; sanity check: every requested block must be present after the transact
  (doseq [block blocks]
    (assert (some? (d/entity @conn [:block/uuid (:block/uuid block)]))
            {:msg "insert-block failed"
             :block block
             :target target})))
;; Insert bare blocks (uuid + optional parent) without any :block/order.
(defmethod transact-db! :insert-no-order-blocks [_ conn block-uuid+parent-coll]
  (ldb/transact! conn
                 (mapv (fn [[block-uuid block-parent]]
                         ;; NOTE(review): an older comment claimed block/content &
                         ;; block/format were added here, but only :block/uuid and
                         ;; :block/parent are transacted now.
                         (cond-> {:block/uuid block-uuid}
                           block-parent (assoc :block/parent [:block/uuid block-parent])))
                       block-uuid+parent-coll)
                 {:persist-op? false
                  :gen-undo-ops? false
                  :rtc-op? true}))
(defn- apply-remote-remove-ops-helper
  "Partition `remove-ops` into:
  :block-uuids-to-remove - uuids of locally-existing blocks removed remotely
  :block-uuids-need-move - uuids of their children NOT being removed, which
                           must be re-parented before the removal."
  [conn remove-ops]
  (let [block-uuid->entity (into {}
                                 (keep
                                  (fn [op]
                                    (when-let [block-uuid (:block-uuid op)]
                                      (when-let [ent (d/entity @conn [:block/uuid block-uuid])]
                                        [block-uuid ent])))
                                  remove-ops))
        block-uuid-set (set (keys block-uuid->entity))
        block-uuids-need-move
        (set
         (mapcat
          (fn [[_block-uuid ent]]
            ;; children of a removed block that are not removed themselves
            (set/difference (set (map :block/uuid (:block/_parent ent))) block-uuid-set))
          block-uuid->entity))]
    {:block-uuids-need-move block-uuids-need-move
     :block-uuids-to-remove block-uuid-set}))
(defn- apply-remote-remove-ops
  "Apply remote :remove ops: first move surviving children to their page
  block (as non-sibling, i.e. first-child position), then delete the
  removed blocks."
  [conn other-ops]
  (let [{:keys [block-uuids-need-move block-uuids-to-remove]}
        (apply-remote-remove-ops-helper conn other-ops)]
    ;; move to page-block's first child
    (doseq [block-uuid block-uuids-need-move]
      (when-let [b (d/entity @conn [:block/uuid block-uuid])]
        (when-let [target-b
                   (d/entity @conn (:db/id (:block/page (d/entity @conn [:block/uuid block-uuid]))))]
          (transact-db! :move-blocks&persist-op conn [b] target-b {:sibling? false}))))
    (let [deleting-blocks (keep (fn [block-uuid]
                                  (d/entity @conn [:block/uuid block-uuid]))
                                block-uuids-to-remove)]
      (when (seq deleting-blocks)
        (transact-db! :delete-blocks conn deleting-blocks {})))))
(defn- insert-or-move-block
  "Insert (or move, when `move?`) block `block-uuid` under its first remote
  parent at `remote-block-order`. Throws when neither a local parent nor an
  order yields a usable position."
  [conn block-uuid remote-parents remote-block-order move? op-value]
  (when (or (seq remote-parents) remote-block-order) ;at least one of parent|order exists
    (let [first-remote-parent (first remote-parents)
          local-parent (when first-remote-parent (d/entity @conn [:block/uuid first-remote-parent]))
          b (d/entity @conn [:block/uuid block-uuid])]
      ;; NOTE(review): the leading `false` in the dispatch vector is constant on
      ;; both sides of the `case` -- it looks like residue of a removed condition;
      ;; only the two `some?` flags actually select a branch.
      (case [false (some? local-parent) (some? remote-block-order)]
        [false true true]
        (do
          (if move?
            (transact-db! :move-blocks conn [(block-reuse-db-id b)] local-parent {:sibling? false})
            (transact-db! :insert-blocks conn
                          [{:block/uuid block-uuid}]
                          local-parent {:sibling? false :keep-uuid? true}))
          (transact-db! :update-block-order-directly conn block-uuid first-remote-parent remote-block-order))
        [false true false]
        (if move?
          (transact-db! :move-blocks conn [b] local-parent
                        {:sibling? false})
          (transact-db! :insert-no-order-blocks conn [[block-uuid first-remote-parent]]))
        [false false true] ;no parent, only update order. e.g. update property's order
        (when (and (empty? remote-parents) move?)
          (transact-db! :update-block-order-directly conn block-uuid nil remote-block-order))
        (let [e (ex-info "Don't know where to insert" {:block-uuid block-uuid
                                                       :remote-parents remote-parents
                                                       :remote-block-order remote-block-order
                                                       :move? move?
                                                       :op-value op-value})]
          (log/error :insert-or-move-block e)
          (throw e))))))
(defn- move-ops-map->sorted-move-ops
  "Topologically sort the move ops in `move-ops-map` ({block-uuid op-env}) so
  every op comes after the ops of its :parents. Returns a vector of op-envs
  in dependency order."
  [move-ops-map]
  (let [uuid->dep-uuids (into {}
                              (map (fn [[uuid env]]
                                     ;; fixed: the original wrapped :parents in a
                                     ;; one-arg `conj`, which is a no-op
                                     [uuid (set (:parents env))]))
                              move-ops-map)
        all-uuids (set (keys move-ops-map))
        sorted-uuids
        (loop [r []
               rest-uuids all-uuids
               uuid (first rest-uuids)]
          (if-not uuid
            r
            (let [dep-uuids (uuid->dep-uuids uuid)]
              ;; visit an unemitted dependency first; emit once all deps are done
              (if-let [next-uuid (first (set/intersection dep-uuids rest-uuids))]
                (recur r rest-uuids next-uuid)
                (let [rest-uuids* (disj rest-uuids uuid)]
                  (recur (conj r uuid) rest-uuids* (first rest-uuids*)))))))]
    (mapv move-ops-map sorted-uuids)))
(defn- apply-remote-remove-page-ops
  "Delete the local page for every remote :remove-page op."
  [conn remove-page-ops]
  (run! (fn [op] (worker-page/delete! conn (:block-uuid op) {:persist-op? false}))
        remove-page-ops))
(defn- get-schema-ref+cardinality
  "Look up `attr` in `db-schema`; return [ref? card-many?] or nil when absent."
  [db-schema attr]
  (when-let [{value-type :db/valueType cardinality :db/cardinality} (get db-schema attr)]
    [(= :db.type/ref value-type)
     (= :db.cardinality/many cardinality)]))
(defn- patch-remote-attr-map-by-local-av-coll
  "Patch `remote-attr-map` with not-yet-pushed local [a v t add?] updates so
  applying the remote op doesn't clobber newer local edits.
  - attrs also present remotely: union local adds / drop local retracts
  - attrs only changed locally (non block/* namespaces only): add their added
    values as card-many vectors."
  [remote-attr-map local-av-coll]
  (let [a->add->v-set
        (reduce
         (fn [m [a v _t add?]]
           ;; per attr, track the net added / retracted value sets
           (let [{add-vset true retract-vset false} (get m a {true #{} false #{}})]
             (assoc m a {true ((if add? conj disj) add-vset v)
                         false ((if add? disj conj) retract-vset v)})))
         {} local-av-coll)
        updated-remote-attr-map1
        (keep
         (fn [[remote-a remote-v]]
           (when-let [{add-vset true retract-vset false} (get a->add->v-set remote-a)]
             [remote-a
              (if (coll? remote-v)
                (-> (set remote-v)
                    (set/union add-vset)
                    (set/difference retract-vset)
                    vec)
                (cond
                  (seq add-vset) (first add-vset)
                  (contains? retract-vset remote-v) nil))]))
         remote-attr-map)
        updated-remote-attr-map2
        (keep
         (fn [[a add->v-set]]
           (when-let [ns (namespace a)]
             (when (and (not (contains? #{"block"} ns))
                        ;; FIXME: only handle non-block/xxx attrs,
                        ;; because some :block/xxx attrs are card-one, we only generate card-many values here
                        (not (contains? remote-attr-map a)))
               (when-let [v-set (not-empty (get add->v-set true))]
                 [a (vec v-set)]))))
         a->add->v-set)]
    (into remote-attr-map
          (concat updated-remote-attr-map1 updated-remote-attr-map2))))
(defn- update-remote-data-by-local-unpushed-ops
  "when remote-data request client to move/update/remove/... blocks,
  these updates maybe not needed or need to update, because this client just updated some of these blocks,
  so we need to update these remote-data by local-ops"
  [affected-blocks-map local-unpushed-ops]
  (assert (client-op/ops-coercer local-unpushed-ops) local-unpushed-ops)
  (reduce
   (fn [affected-blocks-map local-op]
     ;; local-op shape: [op-type t payload]; payload is the last element
     (let [local-op-value (last local-op)]
       (case (first local-op)
         ;; a local move overrides remote remove/move for the same block
         :move
         (let [block-uuid (:block-uuid local-op-value)
               remote-op (get affected-blocks-map block-uuid)]
           (case (:op remote-op)
             :remove (dissoc affected-blocks-map (:block-uuid remote-op))
             :move (dissoc affected-blocks-map (:self remote-op))
             ;; remove block/order, parents in update-attrs, if there're some unpushed local move-ops
             (:update-attrs :move+update-attrs)
             (update affected-blocks-map (:self remote-op) dissoc :block/order :parents)
             ;; default
             affected-blocks-map))
         ;; merge local attr changes into the remote attr map
         :update
         (let [block-uuid (:block-uuid local-op-value)]
           (if-let [remote-op (get affected-blocks-map block-uuid)]
             (let [remote-op* (if (#{:update-attrs :move :move+update-attrs} (:op remote-op))
                                (patch-remote-attr-map-by-local-av-coll remote-op (:av-coll local-op-value))
                                remote-op)]
               (assoc affected-blocks-map block-uuid remote-op*))
             affected-blocks-map))
         :remove
         ;; TODO: if this block's updated by others, we shouldn't remove it
         ;; but now, we don't know who updated this block recv from remote
         ;; once we have this attr(:block/updated-by, :block/created-by), we can finish this TODO
         (let [block-uuid (:block-uuid local-op-value)]
           (dissoc affected-blocks-map block-uuid))
         ;;else
         affected-blocks-map)))
   affected-blocks-map local-unpushed-ops))
(defn- affected-blocks->diff-type-ops
  "Group remote affected-blocks into per-op-type maps, after first adjusting
  them against any unpushed local block-ops (see
  `update-remote-data-by-local-unpushed-ops`).
  Returns a map of {:remove-ops-map :move-ops-map :update-ops-map
  :update-page-ops-map :remove-page-ops-map}.
  Note: :move+update-attrs ops are merged into BOTH move and update maps."
  [repo affected-blocks]
  (let [unpushed-block-ops (client-op/get-all-block-ops repo)
        affected-blocks-map* (if unpushed-block-ops
                               (update-remote-data-by-local-unpushed-ops
                                affected-blocks unpushed-block-ops)
                               affected-blocks)
        ;; group entries by the :op of each op-value, turning each group back into a map
        {remove-ops-map :remove move-ops-map :move update-ops-map :update-attrs
         move+update-ops-map :move+update-attrs
         update-page-ops-map :update-page remove-page-ops-map :remove-page}
        (update-vals
         (group-by (fn [[_ env]] (get env :op)) affected-blocks-map*)
         (partial into {}))]
    {:remove-ops-map remove-ops-map
     :move-ops-map (merge move-ops-map move+update-ops-map)
     :update-ops-map (merge update-ops-map move+update-ops-map)
     :update-page-ops-map update-page-ops-map
     :remove-page-ops-map remove-page-ops-map}))
(defn- check-block-pos
  "NOTE: some blocks don't have :block/order (e.g. whiteboard blocks)
  Compare the local position of `block-uuid` against the remote position.
  Returns :not-exist when the block isn't in the local db,
  :wrong-pos when either order or parent differs, nil when positions match."
  [db block-uuid remote-parents remote-block-order]
  (let [local-b (d/entity db [:block/uuid block-uuid])
        ;; only the immediate parent (first of remote-parents) is compared
        remote-parent-uuid (first remote-parents)]
    (cond
      (nil? local-b)
      :not-exist
      (not= [remote-block-order remote-parent-uuid]
            [(:block/order local-b) (:block/uuid (:block/parent local-b))])
      :wrong-pos
      :else nil)))
;; Explicit allow-list of :block/* attrs that remote update-ops may touch locally.
(def ^:private update-op-watched-attrs
  #{:block/title
    :block/updated-at
    :block/created-at
    :block/alias
    :block/tags
    :block/link
    :block/journal-day
    :logseq.property/classes
    :logseq.property/value})

;; Attr namespaces that are always watched (logseq property namespaces + "logseq.class").
(def ^:private watched-attr-ns
  (conj db-property/logseq-property-namespaces "logseq.class"))
(defn- update-op-watched-attr?
  "Truthy when `attr` should be synced by remote update-ops:
  it is in `update-op-watched-attrs`, or its namespace is a watched
  namespace, or its namespace ends with \".property\" / \".class\"."
  [attr]
  (if (contains? update-op-watched-attrs attr)
    true
    (when-let [attr-ns (namespace attr)]
      (or (contains? watched-attr-ns attr-ns)
          (string/ends-with? attr-ns ".property")
          (string/ends-with? attr-ns ".class")))))
(defn- diff-block-kv->tx-data
  "Diff one attr `k` between local value `local-v` and remote value `remote-v`
  for entity `e`; return datascript tx-data (or nil).
  Dispatch is on the attr's [ref? cardinality-many?] pair from the schema.
  Remote scalar values are transit-encoded strings; ref values are block uuids."
  [db db-schema e k local-v remote-v]
  (when-let [[ref? card-many?] (get-schema-ref+cardinality db-schema k)]
    (case [ref? card-many?]
      ;; ref + card-many: set-diff of uuids; remote-only uuids must already resolve locally
      [true true]
      (let [[local-only remote-only] (data/diff (set local-v) (set remote-v))]
        (cond-> []
          (seq local-only) (concat (map (fn [block-uuid] [:db/retract e k [:block/uuid block-uuid]]) local-only))
          (seq remote-only) (concat (keep (fn [block-uuid]
                                            (when-let [db-id (:db/id (d/entity db [:block/uuid block-uuid]))]
                                              [:db/add e k db-id])) remote-only))))
      ;; ref + card-one: remote may send a coll — take its first element
      [true false]
      (let [remote-block-uuid (if (coll? remote-v) (first remote-v) remote-v)]
        (when (not= local-v remote-block-uuid)
          (if (nil? remote-block-uuid)
            [[:db/retract e k]]
            (when-let [db-id (:db/id (d/entity db [:block/uuid remote-block-uuid]))]
              [[:db/add e k db-id]]))))
      ;; non-ref + card-one: decode transit, then add/retract on mismatch
      [false false]
      (let [remote-v* (if (coll? remote-v)
                        (first (map ldb/read-transit-str remote-v))
                        (ldb/read-transit-str remote-v))]
        (when (not= local-v remote-v*)
          (if (nil? remote-v*)
            ;; FIXME: The following judgment is a temporary fix for incomplete server blocks,
            ;; remove it once it's confirmed that server blocks will not be incomplete.
            (when-not (contains? #{:block/created-at :block/updated-at} k)
              [[:db/retract e k]])
            [[:db/add e k remote-v*]])))
      ;; non-ref + card-many: decode each transit value, then set-diff
      [false true]
      (let [_ (assert (or (nil? remote-v) (coll? remote-v)) {:remote-v remote-v :a k :e e})
            remote-v* (set (map ldb/read-transit-str remote-v))
            [local-only remote-only] (data/diff (set local-v) remote-v*)]
        (cond-> []
          (seq local-only) (concat (map (fn [v]
                                          [:db/retract e k v]) local-only))
          (seq remote-only) (concat (map (fn [v] [:db/add e k v]) remote-only)))))))
(defn- diff-block-map->tx-data
  "Diff the local and remote attr-maps of entity `e`, producing tx-data.
  Every local attr is diffed against its remote counterpart; remote-only
  attrs (absent locally) are then diffed with a nil local value."
  [db e local-block-map remote-block-map]
  (let [db-schema (d/schema db)
        remote-only-map (apply dissoc remote-block-map (keys local-block-map))
        local-side (for [[attr local-value] local-block-map
                         tx (diff-block-kv->tx-data db db-schema e attr
                                                    local-value (get remote-block-map attr))]
                     tx)
        remote-side (for [[attr remote-value] remote-only-map
                          tx (diff-block-kv->tx-data db db-schema e attr
                                                     (get local-block-map attr) remote-value)]
                      tx)]
    (concat local-side remote-side)))
(defn- remote-op-value->tx-data
  "ignore-attr-set: don't update local attrs in this set
  Build tx-data that reconciles local entity `ent` with remote `op-value`.
  Both sides are normalized to comparable maps first:
  - local ref values are converted to block uuids
  - remote values for card-one attrs arriving as colls are unwrapped"
  [db ent op-value ignore-attr-set]
  (assert (some? (:db/id ent)) ent)
  (let [db-schema (d/schema db)
        ;; local view: watched attrs only, refs rendered as block uuids
        local-block-map (->> ent
                             (filter (fn [[attr _]]
                                       (and (update-op-watched-attr? attr)
                                            (not (contains? ignore-attr-set attr)))))
                             (keep (fn [[k v]]
                                     (when-let [[ref? card-many?] (get-schema-ref+cardinality db-schema k)]
                                       [k
                                        (case [ref? card-many?]
                                          [true true]
                                          (keep (fn [x] (when-let [e (:db/id x)] (:block/uuid (d/entity db e)))) v)
                                          [true false]
                                          (let [v* (some->> (:db/id v) (d/entity db) :block/uuid)]
                                            (assert (some? v*) v)
                                            v*)
                                          ;; else
                                          v)])))
                             (into {}))
        remote-block-map (->> op-value
                              (filter (comp update-op-watched-attr? first))
                              (keep (fn [[k v]]
                                      ;; all non-built-in attrs is card-many in remote-op,
                                      ;; convert them according to the client db-schema
                                      (when-let [[_ref? card-many?] (get-schema-ref+cardinality db-schema k)]
                                        [k
                                         (if (and (coll? v) (not card-many?))
                                           (first v)
                                           v)])))
                              (into {}))]
    (diff-block-map->tx-data db (:db/id ent) local-block-map remote-block-map)))
(defn- remote-op-value->schema-tx-data
  "When `op-value` carries a :db/ident, return a one-element tx vector that
  upserts {:block/uuid :db/ident} merged with the decoded :client/schema map.
  Returns nil when there is no :db/ident."
  [block-uuid op-value]
  (when-let [ident (:db/ident op-value)]
    (let [encoded-schema (:client/schema op-value)
          schema-map (when encoded-schema
                       (ldb/read-transit-str encoded-schema))
          base {:block/uuid block-uuid :db/ident ident}]
      [(merge base schema-map)])))
(defn- update-block-order
  "Split :block/order out of `op-value` for entity `e`.
  Returns {:op-value <op-value without :block/order> :tx-data [[:db/add ...]]}
  when an order is present, otherwise {:op-value op-value} unchanged."
  [e op-value]
  (let [order (:block/order op-value)]
    (if order
      {:op-value (dissoc op-value :block/order)
       :tx-data [[:db/add e :block/order order]]}
      {:op-value op-value})))
(defn- update-block-attrs
  "Apply remote attr updates in `op-value` to the local block `block-uuid`.
  No-op when the block doesn't exist locally or op-value has no :block/* attrs.
  Transacts schema tx-data (when :db/ident present) and attr-diff tx-data
  separately, both with rtc tx-meta (no persist, no undo-ops)."
  [conn block-uuid op-value]
  (when-let [ent (d/entity @conn [:block/uuid block-uuid])]
    (when (some (fn [k] (= "block" (namespace k))) (keys op-value)) ; there exists some :block/xxx attrs
      (let [{update-block-order-tx-data :tx-data op-value :op-value} (update-block-order (:db/id ent) op-value)
            tx-meta {:persist-op? false :gen-undo-ops? false :rtc-op? true}]
        (when-let [schema-tx-data (remote-op-value->schema-tx-data block-uuid op-value)]
          (ldb/transact! conn schema-tx-data tx-meta))
        (when-let [tx-data (seq (remote-op-value->tx-data @conn ent (dissoc op-value :client/schema)
                                                          rtc-const/ignore-attrs-when-syncing))]
          (ldb/transact! conn (concat tx-data update-block-order-tx-data) tx-meta))))))
(defn- apply-remote-update-ops
  "Apply remote :update-attrs ops: fix block position first (insert when
  missing, move when misplaced), then update the block's attrs."
  [conn update-ops]
  (doseq [{:keys [parents self] block-order :block/order :as op-value} update-ops]
    ;; position check only possible when the op carries both parents and order
    (when (and parents block-order)
      (let [r (check-block-pos @conn self parents block-order)]
        (case r
          :not-exist
          (insert-or-move-block conn self parents block-order false op-value)
          :wrong-pos
          (insert-or-move-block conn self parents block-order true op-value)
          nil)))
    (update-block-attrs conn self op-value)))
(defn- apply-remote-move-ops
  "Apply remote :move ops (already sorted by dependency).
  Missing blocks are inserted (then their attrs updated); misplaced blocks
  are moved; correctly-placed blocks are left untouched."
  [conn sorted-move-ops]
  (doseq [{:keys [parents self] block-order :block/order :as op-value} sorted-move-ops]
    (let [r (check-block-pos @conn self parents block-order)]
      (case r
        :not-exist
        (do (insert-or-move-block conn self parents block-order false op-value)
            (update-block-attrs conn self op-value))
        :wrong-pos
        (insert-or-move-block conn self parents block-order true op-value)
        ;; else
        nil))))
(defn- need-update-block-attrs-when-apply-update-page-op?
  "Truthy (a seq of attrs) when the update-page op carries keys beyond the
  page-creation basics (:op :self :page-name :block/title)."
  [update-page-op-value]
  (let [basic-keys #{:op :self :page-name :block/title}
        op-keys (set (keys update-page-op-value))]
    (seq (set/difference op-keys basic-keys))))
(defn- apply-remote-update-page-ops
  "Apply remote :update-page ops: create the page locally when it doesn't
  already exist (skipping existing properties/classes and same-uuid journals),
  then update its attrs when the op carries more than page-creation basics."
  [repo conn update-page-ops]
  (doseq [{:keys [self _page-name]
           title :block/title
           :as op-value} update-page-ops]
    (let [db-ident (:db/ident op-value)]
      (when-not (or
                 ;; property or class exists
                 (and db-ident (d/entity @conn db-ident))
                 ;; journal with the same block/uuid exists
                 (ldb/journal? (d/entity @conn [:block/uuid self])))
        (let [create-opts {:uuid self
                           ;; reuse the old db-id if this uuid was deleted earlier in this session
                           :old-db-id (@worker-state/*deleted-block-uuid->db-id self)}
              [_ page-name page-uuid] (worker-page/rtc-create-page! conn
                                                                    (ldb/read-transit-str title)
                                                                    (worker-state/get-date-formatter repo)
                                                                    create-opts)]
          ;; TODO: current page-create fn is buggy, even provide :uuid option, it will create-page with different uuid,
          ;; if there's already existing same name page
          (assert (= page-uuid self) {:page-name page-name :page-uuid page-uuid :should-be self})
          (assert (some? (d/entity @conn [:block/uuid page-uuid])) {:page-uuid page-uuid :page-name page-name})))
      (when (need-update-block-attrs-when-apply-update-page-op? op-value)
        (update-block-attrs conn self op-value)))))
(defn- ensure-refed-blocks-exist
  "Ensure refed-blocks from remote existing in client.
  Blocks are created in parent-before-child order; pages (having :block/name)
  go through the update-page path, other blocks through the move path."
  [repo conn refed-blocks]
  (let [sorted-refed-blocks (common-util/sort-coll-by-dependency :block/uuid :block/parent refed-blocks)]
    (doseq [refed-block sorted-refed-blocks]
      (let [ent (d/entity @conn [:block/uuid (:block/uuid refed-block)])]
        (when-not ent
          (log/info :ensure-refed-blocks-exist refed-block)
          (if (:block/name refed-block)
            ;; page-like refed block: reuse the update-page op machinery
            (apply-remote-update-page-ops repo conn [(-> refed-block
                                                         (assoc :self (:block/uuid refed-block))
                                                         (dissoc :block/uuid))])
            ;; normal block: reuse the move op machinery
            (apply-remote-move-ops conn [(-> refed-block
                                             (assoc :self (:block/uuid refed-block)
                                                    :parents [(:block/parent refed-block)])
                                             (dissoc :block/uuid))])))))))
(defn task--decrypt-blocks-in-remote-update-data
  "Return a missionary task that decrypts the attrs in `encrypt-attr-set` of
  every affected-block and refed-block in `remote-update-data` with `aes-key`.
  Decryption is sequential (block-by-block) via loop/recur over promises."
  [aes-key encrypt-attr-set remote-update-data]
  (assert aes-key)
  (m/sp
    (let [{affected-blocks-map :affected-blocks refed-blocks :refed-blocks} remote-update-data
          affected-blocks-map'
          (loop [[[block-uuid affected-block] & rest-affected-blocks] affected-blocks-map
                 affected-blocks-map-result {}]
            (if-not block-uuid
              affected-blocks-map-result
              (let [affected-block' (c.m/<? (crypt/<decrypt-map aes-key encrypt-attr-set affected-block))]
                (recur rest-affected-blocks (assoc affected-blocks-map-result block-uuid affected-block')))))
          refed-blocks'
          (loop [[refed-block & rest-refed-blocks] refed-blocks
                 refed-blocks-result []]
            (if-not refed-block
              refed-blocks-result
              (let [refed-block' (c.m/<? (crypt/<decrypt-map aes-key encrypt-attr-set refed-block))]
                (recur rest-refed-blocks (conj refed-blocks-result refed-block')))))]
      (assoc remote-update-data
             :affected-blocks affected-blocks-map'
             :refed-blocks refed-blocks'))))
(defn apply-remote-update-check
  "If the check passes, return true
  Decide whether `remote-update-event` is applicable given the local-tx:
  - remote-t <= local-tx             -> already applied, log :skip, return false
  - local-tx < remote-t-before       -> gap exists, throw :local-graph-too-old
  - t-before <= local-tx <= remote-t -> applicable, return true"
  [repo remote-update-event add-log-fn]
  (let [remote-update-data (:value remote-update-event)]
    (assert (rtc-api-schema/data-from-ws-validator remote-update-data) remote-update-data)
    (let [{remote-latest-t :t
           remote-t-before :t-before
           remote-t :t-query-end} remote-update-data
          remote-t (or remote-t remote-latest-t) ;TODO: remove this, be compatible with old-clients for now
          local-tx (client-op/get-local-tx repo)]
      (cond
        (not (and (pos? remote-t)
                  (pos? remote-t-before)))
        (throw (ex-info "invalid remote-data" {:data remote-update-data}))

        (<= remote-t local-tx)
        (do (add-log-fn :rtc.log/apply-remote-update
                        {:sub-type :skip
                         :remote-t remote-t
                         :remote-latest-t remote-latest-t
                         :local-t local-tx})
            false)

        (< local-tx remote-t-before)
        (do (add-log-fn :rtc.log/apply-remote-update {:sub-type :need-pull-remote-data
                                                      :remote-latest-t remote-latest-t
                                                      :remote-t remote-t
                                                      :local-t local-tx
                                                      :remote-t-before remote-t-before})
            (throw (ex-info "need pull earlier remote-data"
                            {:type :rtc.exception/local-graph-too-old
                             :local-tx local-tx})))

        (<= remote-t-before local-tx remote-t) true
        :else (throw (ex-info "unreachable" {:remote-t remote-t
                                             :remote-t-before remote-t-before
                                             :remote-latest-t remote-latest-t
                                             :local-t local-tx}))))))
(defn task--apply-remote-update
  "Apply remote-update(`remote-update-event`)
  Pipeline: check applicability -> (optionally) decrypt -> split affected
  blocks into op-type groups -> transact page/move/update ops in a temp conn
  -> apply remove-ops on the real conn -> emit asset updates -> advance
  local-t/remote-t bookkeeping."
  [graph-uuid repo conn remote-update-event aes-key add-log-fn]
  (m/sp
    (when (apply-remote-update-check repo remote-update-event add-log-fn)
      (let [remote-update-data (:value remote-update-event)
            ;; decrypt only for e2ee graphs (aes-key present)
            remote-update-data (if aes-key
                                 (m/? (task--decrypt-blocks-in-remote-update-data
                                       aes-key rtc-const/encrypt-attr-set
                                       remote-update-data))
                                 remote-update-data)
            ;; TODO: remove this 'or', be compatible with old-clients for now
            remote-t (or (:t-query-end remote-update-data) (:t remote-update-data))
            {affected-blocks-map :affected-blocks refed-blocks :refed-blocks} remote-update-data
            {:keys [remove-ops-map move-ops-map update-ops-map update-page-ops-map remove-page-ops-map]}
            (affected-blocks->diff-type-ops repo affected-blocks-map)
            remove-ops (vals remove-ops-map)
            sorted-move-ops (move-ops-map->sorted-move-ops move-ops-map)
            update-ops (vals update-ops-map)
            update-page-ops (vals update-page-ops-map)
            remove-page-ops (vals remove-page-ops-map)
            db-before @conn
            tx-meta {:rtc-tx? true
                     :persist-op? false
                     :gen-undo-ops? false}]
        (rtc-log-and-state/update-remote-t graph-uuid remote-t)
        (js/console.groupCollapsed "rtc/apply-remote-ops-log")
        (ldb/transact-with-temp-conn!
         conn tx-meta
         (fn [temp-conn _*batch-tx-data]
           (worker-util/profile :ensure-refed-blocks-exist (ensure-refed-blocks-exist repo temp-conn refed-blocks))
           (worker-util/profile :apply-remote-update-page-ops (apply-remote-update-page-ops repo temp-conn update-page-ops))
           (worker-util/profile :apply-remote-move-ops (apply-remote-move-ops temp-conn sorted-move-ops))
           (worker-util/profile :apply-remote-update-ops (apply-remote-update-ops temp-conn update-ops))
           (worker-util/profile :apply-remote-remove-page-ops (apply-remote-remove-page-ops temp-conn remove-page-ops))))
        ;; NOTE: we cannot set :persist-op? = true when batch-tx/with-batch-tx-mode (already set to false)
        ;; and there're some transactions in `apply-remote-remove-ops` need to :persist-op?=true
        (worker-util/profile :apply-remote-remove-ops (apply-remote-remove-ops conn remove-ops))
        ;; wait all remote-ops transacted into db,
        ;; then start to check any asset-updates in remote
        (let [db-after @conn]
          (r.asset/emit-remote-asset-updates-from-block-ops db-before db-after remove-ops update-ops))
        (js/console.groupEnd)
        (client-op/update-local-tx repo remote-t)
        (rtc-log-and-state/update-local-t graph-uuid remote-t)))))

View File

@@ -1,30 +0,0 @@
(ns frontend.worker.rtc.skeleton
"Validate skeleton data between server and client"
(:require [clojure.data :as data]
[datascript.core :as d]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]))
(defn- get-builtin-db-idents
  "Return all :db/ident values of built-in blocks (entities having both
  :block/uuid and :logseq.property/built-in?) in `db`."
  [db]
  (d/q '[:find [?i ...]
         :in $
         :where
         [?b :db/ident ?i]
         [?b :block/uuid]
         [?b :logseq.property/built-in?]]
       db))
(defn calibrate-graph-skeleton
  "Compare the client's graph skeleton against the server's:
  warn when schema versions differ, and warn with the diff when built-in
  :db/ident sets differ. Side effect is logging only; returns nil."
  [server-schema-version server-builtin-db-idents db]
  (let [client-builtin-db-idents (set (get-builtin-db-idents db))
        client-schema-version (ldb/get-graph-schema-version db)]
    (when-not (zero? (db-schema/compare-schema-version client-schema-version server-schema-version))
      (log/warn "RTC schema error: client version doesn't match server's version"
                [client-schema-version server-schema-version]))
    (let [[client-only server-only _]
          (data/diff client-builtin-db-idents server-builtin-db-idents)]
      (when (or (seq client-only) (seq server-only))
        (log/warn :db-idents-diff {:client-only client-only
                                   :server-only server-only})))))

View File

@@ -1,54 +0,0 @@
(ns frontend.worker.rtc.throttle
"Adjust the synchronization frequency dynamically based on the client's RTC-related API call volume."
(:require [cljs.cache :as cache]
[frontend.common.missionary :as c.m]
[frontend.worker.rtc.client-op :as client-op]
[lambdaisland.glogi :as log]
[missionary.core :as m])
(:import [missionary Cancelled]))
;; Max recent api-calls before local-push is throttled.
(def ^:private api-calls-count-threshold 5)
;; TTL cache of recent rtc api-call records; entries expire after 30s.
(def ^:private *rtc-api-calls (atom (cache/ttl-cache-factory {} :ttl 30000)))

(defn- through
  "Insert `item` into the TTL `cache` under a fresh random key, returning the
  updated cache."
  [cache item]
  (let [k (random-uuid)]
    (cache/through (constantly item) cache k)))
;; Unique marker used to trigger TTL-cache expiry cleanup without leaving a trace.
(def ^:private sentinel (js-obj))

(defn- get-items
  "Return all live (non-expired) values in the TTL `cache`."
  [cache]
  (let [cache*
        ;; clean expired items: a miss+evict round-trip forces the ttl-cache
        ;; to drop expired entries
        (-> cache
            (cache/miss sentinel sentinel)
            (cache/evict sentinel))]
    (vals cache*)))
(defn- compute-stats
  "Summarize recent rtc api-call records as a stats map.
  TODO: add more stat-data. e.g. total ws-message-size"
  [api-calls]
  (let [call-count (count api-calls)]
    {:count call-count}))
(defn create-local-updates-check-flow
  "Return a flow: emit if need to push local-updates
  Ticks every `min-interval-ms` (gated by *auto-push?) and emits `true` only
  when recent api-call volume is under the threshold AND unpushed ops exist;
  otherwise the tick is swallowed with (m/amb)."
  [repo *auto-push? min-interval-ms]
  (let [auto-push-flow (m/watch *auto-push?)
        clock-flow (c.m/clock min-interval-ms :clock)
        check-flow (m/latest vector auto-push-flow clock-flow)]
    (m/ap
      (m/?< check-flow)
      (try
        (let [recent-rtc-api-calls-count (:count (compute-stats (get-items @*rtc-api-calls)))]
          (when (and goog.DEBUG
                     (> recent-rtc-api-calls-count api-calls-count-threshold))
            (log/info :rtc-throttle {:recent-rtc-api-calls-count recent-rtc-api-calls-count}))
          (if (and (<= recent-rtc-api-calls-count api-calls-count-threshold)
                   (pos? (client-op/get-unpushed-ops-count repo)))
            true
            (m/amb)))
        ;; a superseded tick (m/?< cancels the branch) just emits nothing
        (catch Cancelled _ (m/amb))))))
(defn add-rtc-api-call-record!
  "Record one rtc api-call in the TTL cache (used for throttling stats)."
  [api-call-record]
  (swap! *rtc-api-calls through api-call-record))

View File

@@ -1,236 +0,0 @@
(ns frontend.worker.rtc.ws
"Websocket wrapped by missionary.
based on
https://github.com/ReilySiegel/missionary-websocket/blob/master/src/com/reilysiegel/missionary/websocket.cljs"
(:require [cljs-http-missionary.client :as http]
[frontend.common.missionary :as c.m]
[frontend.worker.flows :as worker-flows]
[frontend.worker.rtc.debug-log :as rtc-debug-log]
[frontend.worker.rtc.exception :as r.ex]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[missionary.core :as m]))
(defn- get-state
  "Map the websocket's numeric readyState to a keyword
  (:connecting :open :closing :closed)."
  [ws]
  (case (.-readyState ws)
    0 :connecting
    1 :open
    2 :closing
    3 :closed))
(defn- open-ws-task
  "Return a missionary task (fn [s! f!] -> canceller) that opens a WebSocket
  to `url`. On success it yields [mbx ws close-dfv]: a mailbox receiving raw
  message data, the socket, and a dataflow var filled with the close event.
  Fails with the close event when the socket closes before opening."
  [url]
  (fn [s! f!]
    (try
      (let [ws (js/WebSocket. url)]
        (set! (.-onopen ws)
              (fn [_]
                (let [close-dfv (m/dfv)
                      mbx (m/mbx)]
                  (set! (.-onopen ws) nil)
                  (set! (.-onmessage ws) (fn [e] (mbx (.-data e))))
                  (set! (.-onclose ws) (fn [e]
                                         (set! (.-onclose ws) nil)
                                         (close-dfv e)))
                  (s! [mbx ws close-dfv]))))
        (set! (.-onclose ws)
              (fn [e]
                (set! (.-onopen ws) nil)
                (set! (.-onclose ws) nil)
                (f! e)))
        (fn canceller []
          ;; canceller will be called(no gua) even this task succeed
          ;; should only cancel :connecting state websocket
          ;; see also some explanations from lib author about canceller:
          ;; https://clojurians.slack.com/archives/CL85MBPEF/p1714323302110269
          (when (= :connecting (get-state ws))
            (.close ws))))
      (catch :default e
        ;; constructor itself can throw (e.g. bad url); fail and return no-op canceller
        (f! e) #(do)))))
(defn- handle-close
  "Identity for normal values; re-throw when `x` is a js/CloseEvent
  (i.e. the race against close-dfv was won by the socket closing)."
  [x]
  (when (instance? js/CloseEvent x)
    (throw x))
  x)
(defn- create-mws*
  "Task: open a websocket to `url` and wrap it as a map:
  :raw-ws    the underlying js WebSocket
  :send      (fn [data]) -> task that sends, waiting while bufferedAmount>4096,
             throwing the CloseEvent if the socket closes first
  :recv-flow stream of received raw messages, throwing the CloseEvent on close."
  [url]
  (m/sp
    (let [[mbx ws close-dfv] (m/? (open-ws-task url))]
      {:raw-ws ws
       :send (fn [data]
               (m/sp
                 (handle-close
                  (m/?
                   (m/race close-dfv
                           (m/sp (while (< 4096 (.-bufferedAmount ws))
                                   (m/? (m/sleep 50)))
                                 (.send ws data)))))))
       :recv-flow
       (m/stream
        (m/ap
          (loop []
            (m/amb
             (handle-close
              (m/? (m/race close-dfv mbx)))
             (recur)))))})))
(defn closed?
  "True when the wrapped websocket is in the :closing or :closed state."
  [mws]
  (case (get-state (:raw-ws mws))
    (:closing :closed) true
    false))
(defn mws-create
  "Return a task that create a mws (missionary wrapped websocket).
  When failed to open websocket, retry with backoff.
  TODO: retry ASAP once network condition changed
  Retries up to `retry-count` times (exponential delays from c.m/delays);
  the backoff resets when an online event arrives. Open attempts time out
  after `open-ws-timeout` ms."
  [url & {:keys [retry-count open-ws-timeout]
          :or {retry-count 10 open-ws-timeout 10000}}]
  (assert (and (pos-int? retry-count)
               (pos-int? open-ws-timeout))
          [retry-count open-ws-timeout])
  (c.m/backoff
   {:delay-seq (take retry-count c.m/delays)
    :reset-flow worker-flows/online-event-flow}
   (m/sp
     (try
       (if-let [ws (m/? (m/timeout (create-mws* url) open-ws-timeout))]
         ws
         ;; m/timeout yielded nil => open attempt timed out; mark retryable
         (throw (ex-info "open websocket timeout" {:missionary/retry true
                                                   :type :rtc.exception/ws-timeout})))
       (catch js/CloseEvent e
         ;; socket closed during open; also retryable
         (throw (ex-info "failed to open websocket conn"
                         {:missionary/retry true}
                         e)))))))
(defn create-mws-state-flow
  "Flow of websocket state keywords for `mws` (see `get-state`).
  Wraps the socket's onclose/onerror/onopen handlers (chaining to any
  existing ones) and restores them on flow cancellation. m/relieve keeps
  only the latest state for slow consumers."
  [mws]
  (m/relieve
   (m/observe
    (fn ctor [emit!]
      (let [ws (:raw-ws mws)
            old-onclose (.-onclose ws)
            old-onerror (.-onerror ws)
            old-onopen (.-onopen ws)]
        (set! (.-onclose ws) (fn [e]
                               (when old-onclose (old-onclose e))
                               (emit! (get-state ws))))
        (set! (.-onerror ws) (fn [e]
                               (when old-onerror (old-onerror e))
                               (emit! (get-state ws))))
        (set! (.-onopen ws) (fn [e]
                              (when old-onopen (old-onopen e))
                              (emit! (get-state ws))))
        ;; emit the current state immediately on subscription
        (emit! (get-state ws))
        (fn dtor []
          (set! (.-onclose ws) old-onclose)
          (set! (.-onerror ws) old-onerror)
          (set! (.-onopen ws) old-onopen)))))))
(comment
(defn close
[m-ws]
(.close (:raw-ws m-ws))))
(defn send
  "Returns a task: send message
  Coerces `message` through the ws api schema, JSON-encodes it, logs it,
  then sends via the mws :send task."
  [mws message]
  (m/sp
    (let [decoded-message (rtc-api-schema/data-to-ws-coercer message)
          message-str (js/JSON.stringify (clj->js (rtc-api-schema/data-to-ws-encoder decoded-message)))]
      (rtc-debug-log/log-ws-message! :send message-str)
      (m/? ((:send mws) message-str)))))
(defn- recv-flow*
  "Throw if recv `Internal server error`
  Flow of schema-coerced messages from the mws :recv-flow:
  log -> JSON-parse -> reject server-error sentinels -> schema-coerce."
  [m-ws]
  (assert (some? (:recv-flow m-ws)) m-ws)
  (m/eduction
   (map (fn [message]
          (rtc-debug-log/log-ws-message! :recv message)
          message))
   (map #(js->clj (js/JSON.parse %) :keywordize-keys true))
   (map (fn [m]
          (if (contains?
               #{"Endpoint request timed out"
                 "Internal server error"}
               (:message m))
            (throw r.ex/ex-unknown-server-error)
            m)))
   (map rtc-api-schema/data-from-ws-coercer)
   (:recv-flow m-ws)))
(defn recv-flow
  "Throw if recv `Internal server error`.
  Also take care of :s3-presign-url.(when response is too huge, it's stored in s3)
  When a message carries :s3-presign-url, fetch the real payload from s3 and
  coerce it; on a bad http status, emit a get-s3-object-failed error map
  instead (keyed by the original :req-id)."
  [m-ws]
  (let [f (recv-flow* m-ws)]
    (m/ap
      (let [resp (m/?> f)]
        (if-let [s3-presign-url (:s3-presign-url resp)]
          (let [{:keys [status body]} (m/? (http/get s3-presign-url {:with-credentials? false}))]
            (if (http/unexceptional-status? status)
              (rtc-api-schema/data-from-ws-coercer (js->clj (js/JSON.parse body) :keywordize-keys true))
              {:req-id (:req-id resp)
               :ex-message "get s3 object failed"
               :ex-data {:type :rtc.exception/get-s3-object-failed :status status :body body}}))
          resp)))))
(defn- send&recv*
  "Return a task: send message wait to recv its response and return it.
  Throw if timeout
  Matches the response by :req-id within `timeout-ms`; when the response is
  an s3 pointer, fetches the real payload within `s3-get-timeout-ms`.
  Timeouts surface as retryable ExceptionInfo (:missionary/retry true)."
  [mws message & {:keys [timeout-ms s3-get-timeout-ms]
                  :or {timeout-ms 10000 s3-get-timeout-ms 10000}}]
  {:pre [(pos-int? timeout-ms)
         (some? (:req-id message))]}
  (m/sp
    (m/? (send mws message))
    (let [req-id (:req-id message)
          ws-message-result
          (m/?
           (m/timeout
            ;; scan the recv flow until the matching req-id arrives
            (m/reduce
             (fn [_ v]
               (when (= req-id (:req-id v))
                 (reduced v)))
             (recv-flow* mws))
            timeout-ms
            (ex-info (str "recv ws message timeout (" timeout-ms "ms)") {})))
          result (if-let [s3-presign-url (:s3-presign-url ws-message-result)]
                   (let [{:keys [status body] :as r}
                         (m/? (m/timeout
                               (http/get s3-presign-url {:with-credentials? false})
                               s3-get-timeout-ms
                               (ex-info (str "recv s3 message timeout (" s3-get-timeout-ms "ms)") {})))]
                     (cond
                       ;; m/timeout yielded its fallback ExceptionInfo instead of a response
                       (instance? ExceptionInfo r) r
                       (http/unexceptional-status? status)
                       (rtc-api-schema/data-from-ws-coercer (js->clj (js/JSON.parse body) :keywordize-keys true))
                       :else
                       {:req-id (:req-id ws-message-result)
                        :ex-message "get s3 object failed"
                        :ex-data {:type :rtc.exception/get-s3-object-failed :status status :body body}}))
                   ws-message-result)]
      (when (instance? ExceptionInfo result)
        (throw (ex-info (ex-message result) {:missionary/retry true
                                             :type :rtc.exception/ws-timeout
                                             :message message})))
      result)))
(defn send&recv
  "Return a task that send the message then wait to recv its response.
  Throw if timeout
  Generates a fresh :req-id for the message, then delegates to `send&recv*`."
  [mws message & {:keys [timeout-ms] :or {timeout-ms 10000}}]
  (m/sp
    (let [req-id (str (random-uuid))
          message (assoc message :req-id req-id)]
      (m/? (send&recv* mws message :timeout-ms timeout-ms)))))
(comment
(defn- inject-fake-message-to-recv
"Debug fn.
use `queryObjects(WebSocket)` to fetch all websocket objs under db-worker.js context"
[ws fake-message-to-recv]
(.dispatchEvent ws (js/MessageEvent. "message" #js {:data fake-message-to-recv}))))

View File

@@ -1,105 +0,0 @@
(ns frontend.worker.rtc.ws-util
"Add RTC related logic to the function based on ws."
(:require [cljs-http-missionary.client :as http]
[frontend.worker-common.util :as worker-util]
[frontend.worker.rtc.db :as rtc-db]
[frontend.worker.rtc.ws :as ws]
[frontend.worker.state :as worker-state]
[goog.string :as gstring]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[logseq.graph-parser.utf8 :as utf8]
[missionary.core :as m]))
;; Map of remote error-type keywords (from response :ex-data :type) to the
;; client-side exceptions thrown in their place by `handle-remote-ex`.
(def ^:private remote-e-type->ex-info
  {:ws-conn-already-disconnected
   (ex-info "websocket conn is already disconnected" {:type :rtc.exception/ws-already-disconnected})
   :graph-not-exist
   (ex-info "remote graph not exist" {:type :rtc.exception/remote-graph-not-exist})
   :graph-not-ready
   (ex-info "remote graph still creating" {:type :rtc.exception/remote-graph-not-ready})
   :bad-request-body
   (ex-info "bad request body" {:type :rtc.exception/bad-request-body})
   :not-allowed
   (ex-info "not allowed" {:type :rtc.exception/not-allowed})
   :client-graph-too-old
   (ex-info "local graph too old" {:type :rtc.exception/local-graph-too-old})})
(defn- handle-remote-ex
  "Translate a remote error response into a thrown client exception
  (see `remote-e-type->ex-info`); pass other responses through unchanged.
  :graph-not-exist additionally wipes local rtc data and notifies the UI."
  [resp]
  (when (= :graph-not-exist (:type (:ex-data resp)))
    (rtc-db/remove-rtc-data-in-conn! (worker-state/get-current-repo))
    (worker-util/post-message :remote-graph-gone []))
  (if-let [e (get remote-e-type->ex-info (:type (:ex-data resp)))]
    (throw e)
    resp))
(defn- put-apply-ops-message-on-s3-if-too-huge
  "Return a task that return s3-key
  For an apply-ops `message` whose utf-8 encoded payload exceeds 100KB:
  request a presigned s3 PUT url over the ws, upload the payload, and return
  the s3 key. Returns nil for small messages."
  [ws message]
  {:pre [(= "apply-ops" (:action message))]}
  (m/sp
    (let [decoded-message (rtc-api-schema/data-to-ws-coercer (assoc message :req-id "temp-id"))
          message-str (js/JSON.stringify
                       (clj->js (select-keys (rtc-api-schema/data-to-ws-encoder decoded-message)
                                             ["graph-uuid" "ops" "t-before" "schema-version" "api-version"])))
          ;; measure the utf-8 byte length, not the JS string length
          len (.-length (utf8/encode message-str))]
      (when (< 100000 len)
        (let [{:keys [url key]} (m/? (ws/send&recv ws {:action "presign-put-temp-s3-obj"}))
              {:keys [status] :as resp} (m/? (http/put url {:body message-str :with-credentials? false}))]
          (when-not (http/unexceptional-status? status)
            (throw (ex-info "failed to upload apply-ops message" {:resp resp})))
          key)))))
(defn send&recv
  "Return a task: throw exception if recv ex-data response.
  This function will attempt to reconnect and retry once after the ws closed(js/CloseEvent).
  For huge apply-ops request(>100KB),
  - upload its request message to s3 first,
  then add `s3-key` key to request message map"
  [get-ws-create-task message & {:keys [timeout-ms] :or {timeout-ms 10000}}]
  (let [task--helper
        (m/sp
          (let [ws (m/? get-ws-create-task)
                opts {:timeout-ms timeout-ms}
                ;; large apply-ops payloads travel via s3 instead of the ws frame
                s3-key (when (= "apply-ops" (:action message))
                         (m/? (put-apply-ops-message-on-s3-if-too-huge ws message)))
                message* (if s3-key
                           (-> message
                               (assoc :s3-key s3-key)
                               (dissoc :graph-uuid :ops :t-before :schema-version :api-version))
                           message)]
            (handle-remote-ex (m/? (ws/send&recv ws message* opts)))))]
    (m/sp
      (try
        (m/? task--helper)
        (catch js/CloseEvent _
          ;; retry once
          (m/? task--helper))))))
(defn get-ws-url
  "Format the rtc websocket url for `token` using the template in
  worker-state/*rtc-ws-url; nil when no template is configured."
  [token]
  (assert (some? token))
  (when-let [url @worker-state/*rtc-ws-url]
    (gstring/format url token)))
(defn- gen-get-ws-create-map
  "Return a map with atom *current-ws and a task
  that get current ws, create one if needed(closed or not created yet)"
  [url & {:keys [retry-count open-ws-timeout]
          :or {retry-count 10 open-ws-timeout 10000}}]
  (let [*current-ws (atom nil)
        ws-create-task (ws/mws-create url {:retry-count retry-count :open-ws-timeout open-ws-timeout})]
    {:*current-ws *current-ws
     :get-ws-create-task
     (m/sp
       (let [ws @*current-ws]
         ;; reuse the cached ws when it's still usable, otherwise create anew
         (if (and ws
                  (not (ws/closed? ws)))
           ws
           (let [ws (m/? ws-create-task)]
             (reset! *current-ws ws)
             ws))))}))

;; Memoized per-arguments so the same url (and options) share one websocket.
(def gen-get-ws-create-map--memoized
  "Return a memoized task to reuse the same websocket."
  (memoize gen-get-ws-create-map))

View File

@@ -38,18 +38,14 @@
:auth/refresh-token nil
:user/info nil
:rtc/downloading-graph? false
;; thread atoms, these atoms' value are syncing from ui-thread
:thread-atom/online-event (atom nil)}))
(defonce *rtc-ws-url (atom nil))
(defonce *db-sync-config (atom {:enabled? false :ws-url nil}))
(defonce *db-sync-config (atom {:enabled? true :ws-url nil}))
(defonce *db-sync-client (atom nil))
(defonce *sqlite (atom nil))
;; repo -> {:db conn :search conn :client-ops conn :debug-log conn}
;; repo -> {:db conn :search conn :client-ops conn}
(defonce *sqlite-conns (atom {}))
;; repo -> conn
(defonce *datascript-conns (atom nil))
@@ -64,7 +60,7 @@
(defn get-sqlite-conn
([repo] (get-sqlite-conn repo :db))
([repo which-db]
(assert (contains? #{:db :search :client-ops :debug-log} which-db) which-db)
(assert (contains? #{:db :search :client-ops} which-db) which-db)
(get-in @*sqlite-conns [repo which-db])))
(defn get-datascript-conn
@@ -106,15 +102,6 @@
(swap! *state (fn [old-state]
(merge old-state new-state))))
;; TODO: Move or remove as this is no longer stateful
(defn get-date-formatter
[_repo]
"MMM do, yyyy")
(defn set-rtc-downloading-graph!
[value]
(swap! *state assoc :rtc/downloading-graph? value))
(defn get-id-token
[]
(:auth/id-token @*state))

View File

@@ -1,18 +1,17 @@
(ns frontend.worker.db-sync
(ns frontend.worker.sync
"Simple db-sync client based on promesa + WebSocket."
(:require ["/frontend/idbkv" :as idb-keyval]
[cljs-bean.core :as bean]
(:require [cljs-bean.core :as bean]
[clojure.set :as set]
[clojure.string :as string]
[datascript.core :as d]
[datascript.storage :refer [IStorage]]
[frontend.common.crypt :as crypt]
[frontend.worker-common.util :as worker-util]
[frontend.worker.handler.page :as worker-page]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.const :as rtc-const]
[frontend.worker.shared-service :as shared-service]
[frontend.worker.state :as worker-state]
[frontend.worker.sync.client-op :as client-op]
[frontend.worker.sync.const :as rtc-const]
[frontend.worker.sync.crypt :as sync-crypt]
[lambdaisland.glogi :as log]
[logseq.common.util :as common-util]
[logseq.db :as ldb]
@@ -27,8 +26,6 @@
[promesa.core :as p]))
(defonce *repo->latest-remote-tx (atom {}))
(defonce ^:private *repo->aes-key (atom {}))
(defonce ^:private e2ee-store (delay (idb-keyval/newStore "localforage" "keyvaluepairs" 2)))
(defn- current-client
[repo]
@@ -224,13 +221,6 @@
(catch :default e
(fail-fast :db-sync/response-parse-failed (assoc context :error e)))))
(defn- coerce-http-request [schema-key body]
(if-let [coercer (get db-sync-schema/http-request-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :request})]
(when-not (= coerced invalid-coerce)
coerced))
body))
(defn- coerce-http-response [schema-key body]
(if-let [coercer (get db-sync-schema/http-response-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :response})]
@@ -326,295 +316,6 @@
:url url
:body body}))))))
(def ^:private invalid-transit ::invalid-transit)
(defn- graph-e2ee?
[repo]
(when-let [conn (worker-state/get-datascript-conn repo)]
(true? (ldb/get-graph-rtc-e2ee? @conn))))
(defn- user-uuid []
(some-> (worker-state/get-id-token) worker-util/parse-jwt :sub))
(defn- graph-encrypted-aes-key-idb-key
[graph-id]
(str "rtc-encrypted-aes-key###" graph-id))
(defn- <get-item
[k]
(assert (and k @e2ee-store))
(p/let [r (idb-keyval/get k @e2ee-store)]
(js->clj r :keywordize-keys true)))
(defn- <set-item!
[k value]
(assert (and k @e2ee-store))
(idb-keyval/set k value @e2ee-store))
(defn- <clear-item!
[k]
(assert (and k @e2ee-store))
(idb-keyval/del k @e2ee-store))
(defn e2ee-base
[]
(http-base-url))
(defn <fetch-user-rsa-key-pair-raw
[base]
(fetch-json (str base "/e2ee/user-keys")
{:method "GET"}
{:response-schema :e2ee/user-keys}))
(defn <upload-user-rsa-key-pair!
[base public-key encrypted-private-key]
(let [body (coerce-http-request :e2ee/user-keys
{:public-key public-key
:encrypted-private-key encrypted-private-key})]
(when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/user-keys :body body}))
(fetch-json (str base "/e2ee/user-keys")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/user-keys})))
(defn- <ensure-user-rsa-key-pair-raw
[base]
(p/let [existing (-> (<fetch-user-rsa-key-pair-raw base)
(p/catch (fn [error]
(throw error))))]
(if (and (string? (:public-key existing))
(string? (:encrypted-private-key existing)))
existing
(p/let [{:keys [publicKey privateKey]} (crypt/<generate-rsa-key-pair)
{:keys [password]} (worker-state/<invoke-main-thread :thread-api/request-e2ee-password)
encrypted-private-key (crypt/<encrypt-private-key password privateKey)
exported-public-key (crypt/<export-public-key publicKey)
public-key-str (ldb/write-transit-str exported-public-key)
encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)]
(p/let [_ (<upload-user-rsa-key-pair! base public-key-str encrypted-private-key-str)]
{:public-key public-key-str
:encrypted-private-key encrypted-private-key-str})))))
(defn ensure-user-rsa-keys!
[]
(let [base (e2ee-base)]
(when-not (string? base)
(fail-fast :db-sync/missing-field {:base base}))
(<ensure-user-rsa-key-pair-raw base)))
(defn- <decrypt-private-key
[encrypted-private-key-str]
(p/let [encrypted-private-key (ldb/read-transit-str encrypted-private-key-str)
exported-private-key (worker-state/<invoke-main-thread
:thread-api/decrypt-user-e2ee-private-key
encrypted-private-key)]
(crypt/<import-private-key exported-private-key)))
(defn- <import-public-key
[public-key-str]
(p/let [exported (ldb/read-transit-str public-key-str)]
(crypt/<import-public-key exported)))
(defn- <fetch-user-public-key-by-email
[base email]
(fetch-json (str base "/e2ee/user-public-key?email=" (js/encodeURIComponent email))
{:method "GET"}
{:response-schema :e2ee/user-public-key}))
(defn- <fetch-graph-encrypted-aes-key-raw
[base graph-id]
(fetch-json (str base "/e2ee/graphs/" graph-id "/aes-key")
{:method "GET"}
{:response-schema :e2ee/graph-aes-key}))
(defn- <upsert-graph-encrypted-aes-key!
[base graph-id encrypted-aes-key]
(let [body (coerce-http-request :e2ee/graph-aes-key
{:encrypted-aes-key encrypted-aes-key})]
(when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/graph-aes-key :body body}))
(fetch-json (str base "/e2ee/graphs/" graph-id "/aes-key")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/graph-aes-key})))
(defn- <ensure-graph-aes-key
[repo graph-id]
(if-not (graph-e2ee? repo)
(p/resolved nil)
(if-let [cached (get @*repo->aes-key repo)]
(p/resolved cached)
(let [base (e2ee-base)
user-id (user-uuid)]
(when-not (and (string? base) (string? user-id))
(fail-fast :db-sync/missing-field {:base base :user-id user-id :graph-id graph-id}))
(p/let [{:keys [public-key encrypted-private-key]} (<ensure-user-rsa-key-pair-raw base)
public-key' (when (string? public-key) (<import-public-key public-key))
private-key' (when (string? encrypted-private-key) (<decrypt-private-key encrypted-private-key))
local-encrypted (when graph-id
(<get-item (graph-encrypted-aes-key-idb-key graph-id)))
remote-encrypted (when (and (nil? local-encrypted) graph-id)
(p/let [resp (<fetch-graph-encrypted-aes-key-raw base graph-id)]
(when-let [encrypted-aes-key (:encrypted-aes-key resp)]
(ldb/read-transit-str encrypted-aes-key))))
encrypted-aes-key (or local-encrypted remote-encrypted)
aes-key (if encrypted-aes-key
(crypt/<decrypt-aes-key private-key' encrypted-aes-key)
(p/let [aes-key (crypt/<generate-aes-key)
encrypted (crypt/<encrypt-aes-key public-key' aes-key)
encrypted-str (ldb/write-transit-str encrypted)
_ (<upsert-graph-encrypted-aes-key! base graph-id encrypted-str)
_ (<set-item! (graph-encrypted-aes-key-idb-key graph-id) encrypted)]
aes-key))
_ (when (and graph-id encrypted-aes-key (nil? local-encrypted))
(<set-item! (graph-encrypted-aes-key-idb-key graph-id) encrypted-aes-key))]
(swap! *repo->aes-key assoc repo aes-key)
aes-key)))))
(defn <fetch-graph-aes-key-for-download
[repo graph-id]
(let [base (e2ee-base)
aes-key-k (graph-encrypted-aes-key-idb-key graph-id)]
(when-not (and (string? base) (string? graph-id))
(fail-fast :db-sync/missing-field {:base base :graph-id graph-id}))
(p/let [{:keys [public-key encrypted-private-key]} (<fetch-user-rsa-key-pair-raw base)]
(<clear-item! aes-key-k)
(when-not (and (string? public-key) (string? encrypted-private-key))
(fail-fast :db-sync/missing-field {:graph-id graph-id :field :user-rsa-key-pair}))
(p/let [private-key (<decrypt-private-key encrypted-private-key)
encrypted-aes-key (p/let [resp (<fetch-graph-encrypted-aes-key-raw base graph-id)]
(when-let [encrypted-aes-key (:encrypted-aes-key resp)]
(ldb/read-transit-str encrypted-aes-key)))]
(if-not encrypted-aes-key
(fail-fast :db-sync/missing-field {:graph-id graph-id :field :encrypted-aes-key})
(<set-item! aes-key-k encrypted-aes-key))
(p/let [aes-key (crypt/<decrypt-aes-key private-key encrypted-aes-key)]
(swap! *repo->aes-key assoc repo aes-key)
aes-key)))))
(defn <grant-graph-access!
[repo graph-id target-email]
(if-not (graph-e2ee? repo)
(p/resolved nil)
(let [base (e2ee-base)]
(when-not (string? base)
(fail-fast :db-sync/missing-field {:base base :graph-id graph-id}))
(p/let [aes-key (<ensure-graph-aes-key repo graph-id)
_ (when (nil? aes-key)
(fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))
resp (<fetch-user-public-key-by-email base target-email)
public-key-str (:public-key resp)]
(if-not (string? public-key-str)
(fail-fast :db-sync/missing-field {:repo repo :field :public-key :email target-email})
(p/let [public-key (<import-public-key public-key-str)
encrypted (crypt/<encrypt-aes-key public-key aes-key)
encrypted-str (ldb/write-transit-str encrypted)
body (coerce-http-request :e2ee/grant-access
{:target-user-email+encrypted-aes-key-coll
[{:email target-email
:encrypted-aes-key encrypted-str}]})
_ (when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/grant-access :body body}))
_ (fetch-json (str base "/e2ee/graphs/" graph-id "/grant-access")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/grant-access})]
nil))))))
(defn- <encrypt-text-value
[aes-key value]
(assert (string? value) (str "encrypting value should be a string, value: " value))
(p/let [encrypted (crypt/<encrypt-text aes-key (ldb/write-transit-str value))]
(ldb/write-transit-str encrypted)))
(defn- <decrypt-text-value
[aes-key value]
(assert (string? value) (str "encrypted value should be a string, value: " value))
(let [decoded (ldb/read-transit-str value)]
(if (= decoded invalid-transit)
(p/resolved value)
(p/let [value (crypt/<decrypt-text-if-encrypted aes-key decoded)
value' (ldb/read-transit-str value)]
value'))))
(defn- encrypt-tx-item
[aes-key item]
(cond
(and (vector? item) (<= 4 (count item)))
(let [attr (nth item 2)
v (nth item 3)]
(if (contains? rtc-const/encrypt-attr-set attr)
(p/let [v' (<encrypt-text-value aes-key v)]
(assoc item 3 v'))
(p/resolved item)))
:else
(p/resolved item)))
(defn- decrypt-tx-item
[aes-key item]
(cond
(and (vector? item) (<= 4 (count item)))
(let [attr (nth item 2)
v (nth item 3)]
(if (contains? rtc-const/encrypt-attr-set attr)
(p/let [v' (<decrypt-text-value aes-key v)]
(assoc item 3 v'))
(p/resolved item)))
:else
(p/resolved item)))
(defn- <encrypt-tx-data
[aes-key tx-data]
(when (seq tx-data)
(p/let [items (p/all (mapv (fn [item] (encrypt-tx-item aes-key item)) tx-data))]
(vec items))))
(defn- <decrypt-tx-data
[aes-key tx-data]
(when (seq tx-data)
(p/let [items (p/all (mapv (fn [item] (decrypt-tx-item aes-key item)) tx-data))]
(vec items))))
(defn- <decrypt-keys-attrs
[aes-key keys]
(p/all (mapv (fn [[e a v t]]
(if (contains? rtc-const/encrypt-attr-set a)
(p/let [v' (<decrypt-text-value aes-key v)]
[e a v' t])
(p/resolved [e a v t]))) keys)))
(defn- <decrypt-snapshot-row
[aes-key [addr content addresses]]
(p/let [data (ldb/read-transit-str content)
keys' (if (map? data)
(<decrypt-keys-attrs aes-key (:keys data))
(p/let [result (p/all (map #(<decrypt-keys-attrs aes-key %) data))]
;; if you truly need a vector:
(vec result)))
data' (if (map? data) (assoc data :keys keys') keys')
content' (ldb/write-transit-str data')]
[addr content' addresses]))
(defn <decrypt-snapshot-rows-batch
[aes-key rows-batch]
(p/all (map #(<decrypt-snapshot-row aes-key %) rows-batch)))
(defn- <encrypt-datoms
[aes-key datoms]
(p/all
(mapv (fn [d]
(if (contains? rtc-const/encrypt-attr-set (:a d))
(p/let [v' (<encrypt-text-value aes-key (:v d))]
(assoc d :v v'))
d))
datoms)))
(defn- upsert-addr-content!
[^js db data]
(.transaction
@@ -782,11 +483,11 @@
;; (prn :debug :upload :tx-data tx-data)
(when (seq txs)
(->
(p/let [aes-key (<ensure-graph-aes-key repo (:graph-id client))
_ (when (and (graph-e2ee? repo) (nil? aes-key))
(p/let [aes-key (sync-crypt/<ensure-graph-aes-key repo (:graph-id client))
_ (when (and (sync-crypt/graph-e2ee? repo) (nil? aes-key))
(fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))
tx-data* (if aes-key
(<encrypt-tx-data aes-key tx-data)
(sync-crypt/<encrypt-tx-data aes-key tx-data)
tx-data)]
(reset! (:inflight client) tx-ids)
@@ -1188,11 +889,11 @@
txs)
tx (distinct (mapcat identity txs-data))]
(when (seq tx)
(p/let [aes-key (<ensure-graph-aes-key repo (:graph-id client))
_ (when (and (graph-e2ee? repo) (nil? aes-key))
(p/let [aes-key (sync-crypt/<ensure-graph-aes-key repo (:graph-id client))
_ (when (and (sync-crypt/graph-e2ee? repo) (nil? aes-key))
(fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))
tx* (if aes-key
(<decrypt-tx-data aes-key tx)
(sync-crypt/<decrypt-tx-data aes-key tx)
(p/resolved tx))]
(apply-remote-tx! repo client tx*)
(client-op/update-local-tx repo remote-tx)
@@ -1418,8 +1119,8 @@
payload)))]
(if (and (seq base) (seq graph-id))
(if-let [source-conn (worker-state/get-datascript-conn repo)]
(p/let [aes-key (<ensure-graph-aes-key repo graph-id)
_ (when (and (graph-e2ee? repo) (nil? aes-key))
(p/let [aes-key (sync-crypt/<ensure-graph-aes-key repo graph-id)
_ (when (and (sync-crypt/graph-e2ee? repo) (nil? aes-key))
(fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))]
(set-graph-e2ee-enabled! repo)
(ensure-client-graph-uuid! repo graph-id)
@@ -1427,7 +1128,7 @@
_ (prn :debug :datoms-count (count datoms) :time (js/Date.))
_ (update-progress {:sub-type :upload-progress
:message "Encrypting data"})
encrypted-datoms (<encrypt-datoms aes-key datoms)
encrypted-datoms (sync-crypt/<encrypt-datoms aes-key datoms)
{:keys [db] :as temp} (<create-temp-sqlite-conn (d/schema @source-conn) encrypted-datoms)
total-rows (count-kvs-rows db)]
(->

View File

@@ -1,8 +1,8 @@
(ns frontend.worker.rtc.asset-db-listener
(ns frontend.worker.sync.asset-db-listener
"Listen asset-block changes in db, generate asset-sync operations"
(:require [datascript.core :as d]
[frontend.worker.db-listener :as db-listener]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.sync.client-op :as client-op]
[logseq.db :as ldb]))
(defn- max-t

View File

@@ -0,0 +1,192 @@
(ns frontend.worker.sync.client-op
"Store client-ops in a persisted datascript"
(:require [datascript.core :as d]
[frontend.worker.state :as worker-state]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[malli.core :as ma]
[malli.transform :as mt]))
(def op-schema
[:multi {:dispatch first}
[:update-asset
[:catn
[:op :keyword]
[:t :int]
[:value [:map
[:block-uuid :uuid]]]]]
[:remove-asset
[:catn
[:op :keyword]
[:t :int]
[:value [:map
[:block-uuid :uuid]]]]]])
(def ops-schema [:sequential op-schema])
(def ops-coercer (ma/coercer ops-schema mt/json-transformer nil
#(do (log/error ::bad-ops (:value %))
(ma/-fail! ::ops-schema (select-keys % [:value])))))
(def ^:private asset-op-types #{:update-asset :remove-asset})
(def schema-in-db
  "Datascript schema for the client-ops db (client metadata plus pending ops).
  TODO: rename this db-name from client-op to client-metadata+op.
  and move it to its own namespace."
  {:block/uuid {:db/unique :db.unique/identity}
   :db-ident {:db/unique :db.unique/identity}
   :db-ident-or-block-uuid {:db/unique :db.unique/identity}
   ;; local-tx is the latest remote-tx that local db persists
   :local-tx {:db/index true}
   :graph-uuid {:db/index true}
   :db-sync/checksum {:db/index true}
   :db-sync/tx-id {:db/unique :db.unique/identity}
   :db-sync/created-at {:db/index true}
   ;; payload attrs below are stored unindexed
   :db-sync/tx-data {}
   :db-sync/normalized-tx-data {}
   :db-sync/reversed-tx-data {}})
(defn update-graph-uuid
  "Record `graph-uuid` as the remote graph id for `repo` in the client-ops db.
  No-op when the client-ops conn is absent."
  [repo graph-uuid]
  {:pre [(some? graph-uuid)]}
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    ;; NOTE(review): :graph-uuid is indexed but not unique and a fresh tempid
    ;; is used each call, so repeated calls could accumulate multiple
    ;; :graph-uuid datoms — confirm callers invoke this at most once per graph.
    (ldb/transact! conn [[:db/add "e" :graph-uuid graph-uuid]])))
(defn get-graph-uuid
  "Return the stored remote graph uuid for `repo`, or nil when the
  client-ops conn is missing or no :graph-uuid datom exists."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (:v (first (d/datoms @conn :avet :graph-uuid)))))
(defn update-local-tx
  "Persist `t`, the latest remote-tx id the local db has applied, for `repo`.
  Reuses the entity already carrying :local-tx (or, failing that, the one
  carrying :db-sync/checksum) so this metadata stays on a single entity;
  otherwise creates a new entity via tempid \"e\".
  Asserts the client-ops conn exists."
  [repo t]
  {:pre [(some? t)]}
  (let [conn (worker-state/get-client-ops-conn repo)]
    (assert (some? conn) repo)
    (let [tx-data
          (if-let [datom (first (d/datoms @conn :avet :local-tx))]
            [:db/add (:e datom) :local-tx t]
            (if-let [datom (first (d/datoms @conn :avet :db-sync/checksum))]
              [:db/add (:e datom) :local-tx t]
              [:db/add "e" :local-tx t]))]
      (ldb/transact! conn [tx-data]))))
(comment
(defn update-local-checksum
[repo checksum]
{:pre [(some? checksum)]}
(let [conn (worker-state/get-client-ops-conn repo)]
(assert (some? conn) repo)
(let [tx-data
(if-let [datom (first (d/datoms @conn :avet :db-sync/checksum))]
[:db/add (:e datom) :db-sync/checksum checksum]
(if-let [datom (first (d/datoms @conn :avet :local-tx))]
[:db/add (:e datom) :db-sync/checksum checksum]
[:db/add "e" :db-sync/checksum checksum]))]
(ldb/transact! conn [tx-data])))))
(defn remove-local-tx
  "Retract both :local-tx and :db-sync/checksum from the entity holding
  :local-tx for `repo`. No-op when the conn or the :local-tx datom is
  missing."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (when-let [datom (first (d/datoms @conn :avet :local-tx))]
      (ldb/transact! conn [[:db/retract (:e datom) :local-tx]
                           [:db/retract (:e datom) :db-sync/checksum]]))))
(defn get-local-tx
  "Return the latest remote-tx id persisted locally for `repo` (nil when
  none has been recorded yet). Asserts the client-ops conn exists."
  [repo]
  (let [conn (worker-state/get-client-ops-conn repo)]
    (assert (some? conn) repo)
    (let [r (:v (first (d/datoms @conn :avet :local-tx)))]
      ;; (assert (some? r))
      r)))
(comment
(defn get-local-checksum
[repo]
(let [conn (worker-state/get-client-ops-conn repo)]
(assert (some? conn) repo)
(:v (first (d/datoms @conn :avet :db-sync/checksum))))))
(defn rtc-db-graph?
  "Is RTC enabled for `repo`, i.e. does the client-ops db hold a remote
  graph uuid?"
  [repo]
  ;; NOTE(review): `(exists? js/process)` makes this unconditionally true in
  ;; Node-like environments regardless of `repo` — confirm this is an
  ;; intentional test/CLI escape hatch rather than a leftover.
  (or (exists? js/process)
      (some? (get-graph-uuid repo))))
;;; asset ops
(defn add-asset-ops
  "Merge `asset-ops` — vectors of [op-type t {:block-uuid ...}], validated by
  `ops-coercer` — into the client-ops db for `repo`.
  Conflict resolution keyed by op timestamp `t`:
  - an :update-asset is dropped when a newer :remove-asset already exists,
    otherwise it is stored and any older :remove-asset is retracted;
  - a :remove-asset is dropped when a newer :update-asset already exists,
    otherwise it is stored and any older :update-asset is retracted.
  Asserts the client-ops conn exists."
  [repo asset-ops]
  (let [conn (worker-state/get-client-ops-conn repo)
        ops (ops-coercer asset-ops)]
    (assert (some? conn) repo)
    (letfn [(already-removed? [remove-op t]
              ;; remove-op is [:remove-asset t' value]; `second` is its t'
              (some-> remove-op second (> t)))
            (update-after-remove? [update-op t]
              (some-> update-op second (> t)))]
      (doseq [op ops]
        (let [[op-type t value] op
              {:keys [block-uuid]} value
              exist-block-ops-entity (d/entity @conn [:block/uuid block-uuid])
              e (:db/id exist-block-ops-entity)]
          ;; `not-empty` skips the transact entirely when the op loses the
          ;; timestamp race (the case branch returns nil)
          (when-let [tx-data
                     (not-empty
                      (case op-type
                        :update-asset
                        (let [remove-asset-op (get exist-block-ops-entity :remove-asset)]
                          (when-not (already-removed? remove-asset-op t)
                            (cond-> [{:block/uuid block-uuid
                                      :update-asset op}]
                              remove-asset-op (conj [:db.fn/retractAttribute e :remove-asset]))))
                        :remove-asset
                        (let [update-asset-op (get exist-block-ops-entity :update-asset)]
                          (when-not (update-after-remove? update-asset-op t)
                            (cond-> [{:block/uuid block-uuid
                                      :remove-asset op}]
                              update-asset-op (conj [:db.fn/retractAttribute e :update-asset]))))))]
            (ldb/transact! conn tx-data)))))))
(defn add-all-exists-asset-as-ops
  "Enqueue an :update-asset op (with t = 1) for every asset block — entities
  carrying :logseq.property.asset/type — currently in `repo`'s datascript
  db. Used to seed asset sync for an existing graph."
  [repo]
  (let [conn (worker-state/get-datascript-conn repo)
        _ (assert (some? conn))
        asset-block-uuids (d/q '[:find [?block-uuid ...]
                                 :where
                                 [?b :block/uuid ?block-uuid]
                                 [?b :logseq.property.asset/type]]
                               @conn)
        ops (map
             (fn [block-uuid] [:update-asset 1 {:block-uuid block-uuid}])
             asset-block-uuids)]
    (add-asset-ops repo ops)))
(defn- get-all-asset-ops*
  "Scan `db` (a client-ops datascript db value) and return a map of
  entity-id -> op-map, where each op-map holds :block/uuid plus any pending
  asset-op attributes (:update-asset / :remove-asset). Entities whose only
  matched attribute is :block/uuid carry no pending op and are filtered out."
  [db]
  (->> (d/datoms db :eavt)
       (group-by :e)
       (keep (fn [[e datoms]]
               (let [op-map (into {}
                                  (keep (fn [datom]
                                          (let [a (:a datom)]
                                            (when (or (keyword-identical? :block/uuid a) (contains? asset-op-types a))
                                              [a (:v datom)]))))
                                  datoms)]
                 (when (and (:block/uuid op-map)
                            ;; count>1 = contains some `asset-op-types`
                            (> (count op-map) 1))
                   [e op-map]))))
       (into {})))
(defn get-unpushed-asset-ops-count
  "Number of entities in `repo`'s client-ops db that still carry a pending
  asset op; nil when the conn is missing."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (count (get-all-asset-ops* @conn))))
(defn get-all-asset-ops
  "Return every pending asset op-map ({:block/uuid ..., :update-asset ...}
  etc.) for `repo`; nil when the client-ops conn is missing."
  [repo]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (vals (get-all-asset-ops* @conn))))
(defn remove-asset-op
  "Retract every asset-op attribute (:update-asset, :remove-asset) from the
  entity identified by `asset-uuid` in `repo`'s client-ops db. No-op when
  the conn or the entity is missing."
  [repo asset-uuid]
  (when-let [conn (worker-state/get-client-ops-conn repo)]
    (let [ent (d/entity @conn [:block/uuid asset-uuid])]
      (when-let [e (:db/id ent)]
        (ldb/transact! conn (map (fn [a] [:db.fn/retractAttribute e a]) asset-op-types))))))

View File

@@ -1,4 +1,4 @@
(ns frontend.worker.rtc.const
(ns frontend.worker.sync.const
"RTC constants"
(:require [logseq.common.defkeywords :as common-def :refer [defkeywords]]
[logseq.db.frontend.kv-entity :as kv-entity]
@@ -21,16 +21,6 @@
;; {:doc "keyword option for RTC. ignore this *entity* when syncing graph. Default false"}
)
(def ignore-attrs-when-init-upload
(into #{}
(keep (fn [[kw config]] (when (get-in config [:rtc :rtc/ignore-attr-when-init-upload]) kw)))
db-property/built-in-properties))
(def ignore-attrs-when-init-download
(into #{}
(keep (fn [[kw config]] (when (get-in config [:rtc :rtc/ignore-attr-when-init-download]) kw)))
db-property/built-in-properties))
(def ignore-attrs-when-syncing
(into #{}
(keep (fn [[kw config]] (when (get-in config [:rtc :rtc/ignore-attr-when-syncing]) kw)))
@@ -41,11 +31,6 @@
(keep (fn [[kw config]] (when (get-in config [:rtc :rtc/ignore-entity-when-init-upload]) kw)))
kv-entity/kv-entities))
(def ignore-entities-when-init-download
(into #{}
(keep (fn [[kw config]] (when (get-in config [:rtc :rtc/ignore-entity-when-init-download]) kw)))
kv-entity/kv-entities))
(def encrypt-attr-set
"block attributes that need to be encrypted"
#{:block/title :block/name})

View File

@@ -0,0 +1,506 @@
(ns frontend.worker.sync.crypt
"E2EE helpers for db-sync."
(:require ["/frontend/idbkv" :as idb-keyval]
[clojure.string :as string]
[frontend.common.crypt :as crypt]
[frontend.common.file.opfs :as opfs]
[frontend.common.thread-api :refer [def-thread-api]]
[frontend.worker-common.util :as worker-util]
[frontend.worker.state :as worker-state]
[frontend.worker.sync.const :as sync-const]
[lambdaisland.glogi :as log]
[logseq.db :as ldb]
[logseq.db-sync.malli-schema :as db-sync-schema]
[promesa.core :as p]))
(defonce ^:private *repo->aes-key (atom {}))
(defonce ^:private e2ee-store (delay (idb-keyval/newStore "localforage" "keyvaluepairs" 2)))
(defonce ^:private e2ee-password-file "e2ee-password")
(defonce ^:private native-env?
(let [href (try (.. js/self -location -href)
(catch :default _ nil))]
(boolean (and (string? href)
(or (string/includes? href "electron=true")
(string/includes? href "capacitor=true"))))))
(def ^:private invalid-coerce ::invalid-coerce)
(def ^:private invalid-transit ::invalid-transit)
(defn- native-worker?
[]
native-env?)
(defn- <native-save-password-text!
[encrypted-text]
(worker-state/<invoke-main-thread :thread-api/native-save-e2ee-password encrypted-text))
(defn- <native-read-password-text
[]
(worker-state/<invoke-main-thread :thread-api/native-get-e2ee-password))
(defn- <save-e2ee-password
[refresh-token password]
(p/let [result (crypt/<encrypt-text-by-text-password refresh-token password)
text (ldb/write-transit-str result)]
(if (native-worker?)
(-> (p/let [_ (<native-save-password-text! text)]
nil)
(p/catch (fn [e]
(log/error :native-save-e2ee-password {:error e})
(opfs/<write-text! e2ee-password-file text))))
(opfs/<write-text! e2ee-password-file text))))
(defn- <read-e2ee-password
[refresh-token]
(p/let [text (if (native-worker?)
(<native-read-password-text)
(opfs/<read-text! e2ee-password-file))
data (ldb/read-transit-str text)
password (crypt/<decrypt-text-by-text-password refresh-token data)]
password))
(defn- auth-token []
(worker-state/get-id-token))
(defn- auth-headers []
(when-let [token (auth-token)]
{"authorization" (str "Bearer " token)}))
(defn- with-auth-headers
  "Merge the bearer-token auth headers into `opts` (a fetch options map).
  Returns `opts` untouched when no id-token is available."
  [opts]
  (let [auth (auth-headers)]
    (if (nil? auth)
      opts
      (update opts :headers #(merge (or % {}) auth)))))
(defn- coerce
[coercer value context]
(try
(coercer value)
(catch :default e
(log/error :db-sync/malli-coerce-failed (merge context {:error e :value value}))
invalid-coerce)))
(defn- coerce-http-request [schema-key body]
(if-let [coercer (get db-sync-schema/http-request-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :request})]
(when-not (= coerced invalid-coerce)
coerced))
body))
(defn- coerce-http-response [schema-key body]
(if-let [coercer (get db-sync-schema/http-response-coercers schema-key)]
(let [coerced (coerce coercer body {:schema schema-key :dir :response})]
(when-not (= coerced invalid-coerce)
coerced))
body))
(defn fail-fast
  "Log `data` under `tag`, then throw an ex-info whose message is the tag's
  name and whose ex-data is `data`. Used to abort sync flows immediately."
  [tag data]
  (log/error tag data)
  (throw (ex-info (name tag) data)))
(defn e2ee-base
[]
(or (:http-base @worker-state/*db-sync-config)
(when-let [ws-url (:ws-url @worker-state/*db-sync-config)]
(let [base (cond
(string/starts-with? ws-url "wss://")
(str "https://" (subs ws-url (count "wss://")))
(string/starts-with? ws-url "ws://")
(str "http://" (subs ws-url (count "ws://")))
:else ws-url)]
(string/replace base #"/sync/%s$" "")))))
(defn- fetch-json
[url opts {:keys [response-schema error-schema] :or {error-schema :error}}]
(p/let [resp (js/fetch url (clj->js (with-auth-headers opts)))
text (.text resp)
data (when (seq text) (js/JSON.parse text))]
(if (.-ok resp)
(let [body (js->clj data :keywordize-keys true)
body (if response-schema
(coerce-http-response response-schema body)
body)]
(if (or (nil? response-schema) body)
body
(throw (ex-info "db-sync invalid response"
{:status (.-status resp)
:url url
:body body}))))
(let [body (when data (js->clj data :keywordize-keys true))
body (if error-schema
(coerce-http-response error-schema body)
body)]
(throw (ex-info "db-sync request failed"
{:status (.-status resp)
:url url
:body body}))))))
(defn graph-e2ee?
[repo]
(when-let [conn (worker-state/get-datascript-conn repo)]
(true? (ldb/get-graph-rtc-e2ee? @conn))))
(defn- get-user-uuid []
(some-> (worker-state/get-id-token) worker-util/parse-jwt :sub))
(defn- <get-item
[k]
(assert (and k @e2ee-store))
(p/let [r (idb-keyval/get k @e2ee-store)]
(js->clj r :keywordize-keys true)))
(defn- <set-item!
[k value]
(assert (and k @e2ee-store))
(idb-keyval/set k value @e2ee-store))
(defn- <clear-item!
[k]
(assert (and k @e2ee-store))
(idb-keyval/del k @e2ee-store))
(defn- graph-encrypted-aes-key-idb-key
[graph-id]
(str "rtc-encrypted-aes-key###" graph-id))
(defn <fetch-user-rsa-key-pair-raw
[base]
(fetch-json (str base "/e2ee/user-keys")
{:method "GET"}
{:response-schema :e2ee/user-keys}))
(defn <upload-user-rsa-key-pair!
[base public-key encrypted-private-key]
(let [body (coerce-http-request :e2ee/user-keys
{:public-key public-key
:encrypted-private-key encrypted-private-key})]
(when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/user-keys :body body}))
(fetch-json (str base "/e2ee/user-keys")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/user-keys})))
(defn- <ensure-user-rsa-key-pair-raw
  "Fetch the user's RSA key pair from the server at `base`. When none exists,
  generate a fresh pair, encrypt the private key with the e2ee password
  requested from the main thread, upload both halves, and resolve to
  {:public-key ... :encrypted-private-key ... :password ...} (keys as
  transit strings).
  Note: :password is only present on the generate path — callers must not
  rely on it for an already-existing pair.
  Fix: the previous `(p/catch (fn [error] (throw error)))` wrapper was a
  no-op rethrow and has been removed; fetch failures still propagate."
  [base]
  (p/let [existing (<fetch-user-rsa-key-pair-raw base)]
    (if (and (string? (:public-key existing))
             (string? (:encrypted-private-key existing)))
      existing
      (p/let [{:keys [publicKey privateKey]} (crypt/<generate-rsa-key-pair)
              {:keys [password]} (worker-state/<invoke-main-thread :thread-api/request-e2ee-password)
              encrypted-private-key (crypt/<encrypt-private-key password privateKey)
              exported-public-key (crypt/<export-public-key publicKey)
              public-key-str (ldb/write-transit-str exported-public-key)
              encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)]
        ;; upload before resolving so a failed upload rejects the whole chain
        (p/let [_ (<upload-user-rsa-key-pair! base public-key-str encrypted-private-key-str)]
          {:public-key public-key-str
           :encrypted-private-key encrypted-private-key-str
           :password password})))))
(defn ensure-user-rsa-keys!
[]
(let [base (e2ee-base)]
(when-not (string? base)
(fail-fast :db-sync/missing-field {:base base}))
(<ensure-user-rsa-key-pair-raw base)))
(defn- <decrypt-private-key
[encrypted-private-key-str]
(p/let [encrypted-private-key (ldb/read-transit-str encrypted-private-key-str)
exported-private-key (worker-state/<invoke-main-thread
:thread-api/decrypt-user-e2ee-private-key
encrypted-private-key)]
(crypt/<import-private-key exported-private-key)))
(defn- <import-public-key
[public-key-str]
(p/let [exported (ldb/read-transit-str public-key-str)]
(crypt/<import-public-key exported)))
(defn- <fetch-user-public-key-by-email
[base email]
(fetch-json (str base "/e2ee/user-public-key?email=" (js/encodeURIComponent email))
{:method "GET"}
{:response-schema :e2ee/user-public-key}))
(defn- <fetch-graph-encrypted-aes-key-raw
[base graph-id]
(fetch-json (str base "/e2ee/graphs/" graph-id "/aes-key")
{:method "GET"}
{:response-schema :e2ee/graph-aes-key}))
(defn- <upsert-graph-encrypted-aes-key!
[base graph-id encrypted-aes-key]
(let [body (coerce-http-request :e2ee/graph-aes-key
{:encrypted-aes-key encrypted-aes-key})]
(when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/graph-aes-key :body body}))
(fetch-json (str base "/e2ee/graphs/" graph-id "/aes-key")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/graph-aes-key})))
(defn <ensure-graph-aes-key
  "Resolve the AES key used to encrypt `repo`'s synced data, in order:
  - nil when the graph is not e2ee-enabled;
  - the in-memory cache (*repo->aes-key);
  - the locally cached encrypted key (idb), decrypted with the user's RSA
    private key;
  - the encrypted key fetched from the server, decrypted and cached locally;
  - otherwise a freshly generated AES key, encrypted with the user's public
    key, uploaded, and cached locally.
  Memoizes the resolved key in *repo->aes-key and resolves to it."
  [repo graph-id]
  (if-not (graph-e2ee? repo)
    (p/resolved nil)
    (if-let [cached (get @*repo->aes-key repo)]
      (p/resolved cached)
      (let [base (e2ee-base)
            user-id (get-user-uuid)]
        (when-not (and (string? base) (string? user-id))
          (fail-fast :db-sync/missing-field {:base base :user-id user-id :graph-id graph-id}))
        (p/let [{:keys [public-key encrypted-private-key]} (<ensure-user-rsa-key-pair-raw base)
                public-key' (when (string? public-key) (<import-public-key public-key))
                private-key' (when (string? encrypted-private-key) (<decrypt-private-key encrypted-private-key))
                ;; prefer the locally cached encrypted key; only hit the
                ;; server when idb has nothing
                local-encrypted (when graph-id
                                  (<get-item (graph-encrypted-aes-key-idb-key graph-id)))
                remote-encrypted (when (and (nil? local-encrypted) graph-id)
                                   (p/let [resp (<fetch-graph-encrypted-aes-key-raw base graph-id)]
                                     (when-let [encrypted-aes-key (:encrypted-aes-key resp)]
                                       (ldb/read-transit-str encrypted-aes-key))))
                encrypted-aes-key (or local-encrypted remote-encrypted)
                aes-key (if encrypted-aes-key
                          (crypt/<decrypt-aes-key private-key' encrypted-aes-key)
                          ;; no key anywhere: mint one, publish it remotely,
                          ;; and cache it locally
                          (p/let [aes-key (crypt/<generate-aes-key)
                                  encrypted (crypt/<encrypt-aes-key public-key' aes-key)
                                  encrypted-str (ldb/write-transit-str encrypted)
                                  _ (<upsert-graph-encrypted-aes-key! base graph-id encrypted-str)
                                  _ (<set-item! (graph-encrypted-aes-key-idb-key graph-id) encrypted)]
                            aes-key))
                ;; a remotely fetched key was not in idb yet — cache it
                _ (when (and graph-id encrypted-aes-key (nil? local-encrypted))
                    (<set-item! (graph-encrypted-aes-key-idb-key graph-id) encrypted-aes-key))]
          (swap! *repo->aes-key assoc repo aes-key)
          aes-key)))))
(defn <fetch-graph-aes-key-for-download
  "Fetch and decrypt the graph AES key for `graph-id` ahead of a full graph
  download for `repo`. Clears any locally cached encrypted key first, then
  pulls the user's RSA key pair and the graph's encrypted AES key from the
  server, re-caches the encrypted key in idb, decrypts it with the user's
  private key, memoizes the result in *repo->aes-key and resolves to it.
  Fails fast when the base URL, graph id, key pair, or encrypted key is
  missing."
  [repo graph-id]
  (let [base (e2ee-base)
        aes-key-k (graph-encrypted-aes-key-idb-key graph-id)]
    (when-not (and (string? base) (string? graph-id))
      (fail-fast :db-sync/missing-field {:base base :graph-id graph-id}))
    (p/let [{:keys [public-key encrypted-private-key]} (<fetch-user-rsa-key-pair-raw base)]
      ;; drop the stale cached key; re-cached below once re-fetched
      (<clear-item! aes-key-k)
      (when-not (and (string? public-key) (string? encrypted-private-key))
        (fail-fast :db-sync/missing-field {:graph-id graph-id :field :user-rsa-key-pair}))
      (p/let [private-key (<decrypt-private-key encrypted-private-key)
              encrypted-aes-key (p/let [resp (<fetch-graph-encrypted-aes-key-raw base graph-id)]
                                  (when-let [encrypted-aes-key (:encrypted-aes-key resp)]
                                    (ldb/read-transit-str encrypted-aes-key)))]
        (if-not encrypted-aes-key
          (fail-fast :db-sync/missing-field {:graph-id graph-id :field :encrypted-aes-key})
          ;; NOTE(review): this <set-item! promise is not awaited — the idb
          ;; write is fire-and-forget; confirm that is acceptable here.
          (<set-item! aes-key-k encrypted-aes-key))
        (p/let [aes-key (crypt/<decrypt-aes-key private-key encrypted-aes-key)]
          (swap! *repo->aes-key assoc repo aes-key)
          aes-key)))))
(defn <grant-graph-access!
[repo graph-id target-email]
(if-not (graph-e2ee? repo)
(p/resolved nil)
(let [base (e2ee-base)]
(when-not (string? base)
(fail-fast :db-sync/missing-field {:base base :graph-id graph-id}))
(p/let [aes-key (<ensure-graph-aes-key repo graph-id)
_ (when (nil? aes-key)
(fail-fast :db-sync/missing-field {:repo repo :field :aes-key}))
resp (<fetch-user-public-key-by-email base target-email)
public-key-str (:public-key resp)]
(if-not (string? public-key-str)
(fail-fast :db-sync/missing-field {:repo repo :field :public-key :email target-email})
(p/let [public-key (<import-public-key public-key-str)
encrypted (crypt/<encrypt-aes-key public-key aes-key)
encrypted-str (ldb/write-transit-str encrypted)
body (coerce-http-request :e2ee/grant-access
{:target-user-email+encrypted-aes-key-coll
[{:email target-email
:encrypted-aes-key encrypted-str}]})
_ (when (nil? body)
(fail-fast :db-sync/invalid-field {:type :e2ee/grant-access :body body}))
_ (fetch-json (str base "/e2ee/graphs/" graph-id "/grant-access")
{:method "POST"
:headers {"content-type" "application/json"}
:body (js/JSON.stringify (clj->js body))}
{:response-schema :e2ee/grant-access})]
nil))))))
(defn- <encrypt-text-value
[aes-key value]
(assert (string? value) (str "encrypting value should be a string, value: " value))
(p/let [encrypted (crypt/<encrypt-text aes-key (ldb/write-transit-str value))]
(ldb/write-transit-str encrypted)))
(defn- <decrypt-text-value
  "Reverse of `<encrypt-text-value`: transit-decode `value`, decrypt the
  result if it looks encrypted, then transit-decode the plaintext back to
  the original value. Resolves to `value` unchanged when transit decoding
  yields the `invalid-transit` sentinel."
  [aes-key value]
  (assert (string? value) (str "encrypted value should be a string, value: " value))
  (let [decoded (ldb/read-transit-str value)]
    ;; NOTE(review): this relies on ldb/read-transit-str returning the
    ;; ::invalid-transit sentinel (rather than throwing) on bad input —
    ;; verify that contract; otherwise this branch is dead.
    (if (= decoded invalid-transit)
      (p/resolved value)
      (p/let [value (crypt/<decrypt-text-if-encrypted aes-key decoded)
              value' (ldb/read-transit-str value)]
        value'))))
(defn- encrypt-tx-item
  "Encrypt the value slot (index 3) of a single tx-data item when its
  attribute (index 2) is in `sync-const/encrypt-attr-set`. Items that are
  not 4+-element vectors, or whose attribute needs no encryption, pass
  through unchanged. Always resolves to the (possibly rewritten) item."
  [aes-key item]
  (if-not (and (vector? item) (<= 4 (count item)))
    (p/resolved item)
    (let [[_op _e attr v] item]
      (if-not (contains? sync-const/encrypt-attr-set attr)
        (p/resolved item)
        (p/let [encrypted-v (<encrypt-text-value aes-key v)]
          (assoc item 3 encrypted-v))))))
(defn- decrypt-tx-item
  "Inverse of `encrypt-tx-item`: decrypt the value slot (index 3) of a
  tx-data item when its attribute (index 2) is in
  `sync-const/encrypt-attr-set`. Always resolves to the (possibly
  rewritten) item."
  [aes-key item]
  (if-not (and (vector? item) (<= 4 (count item)))
    (p/resolved item)
    (let [[_op _e attr v] item]
      (if-not (contains? sync-const/encrypt-attr-set attr)
        (p/resolved item)
        (p/let [plain-v (<decrypt-text-value aes-key v)]
          (assoc item 3 plain-v))))))
(defn <encrypt-tx-data
  "Encrypt every item of `tx-data` with `aes-key`; resolves to the items in
  their original order."
  [aes-key tx-data]
  ;; p/all already resolves to the realized collection, so no extra
  ;; p/let wrapper is needed
  (p/all (mapv #(encrypt-tx-item aes-key %) tx-data)))
(defn <decrypt-tx-data
  "Decrypt every item of `tx-data` with `aes-key`; resolves to the items in
  their original order."
  [aes-key tx-data]
  ;; p/all already resolves to the realized collection, so no extra
  ;; p/let wrapper is needed
  (p/all (mapv #(decrypt-tx-item aes-key %) tx-data)))
(defn- <decrypt-datoms
[aes-key data]
(p/all
(map
(fn [[e a v t]]
(if (contains? sync-const/encrypt-attr-set a)
(p/let [v' (<decrypt-text-value aes-key v)]
[e a v' t])
[e a v t]))
data)))
(defn- <decrypt-snapshot-row
  "Decrypt a single snapshot storage row `[addr raw-content raw-addresses]`.
  `raw-content` is a transit string holding either a map (whose :keys entry is
  a datom collection) or a collection of datom collections; the encrypted
  datoms inside are decrypted via `<decrypt-datoms` and the content is
  re-serialized. When `raw-addresses` (a JSON string) is present, the parsed
  addresses are attached to a map-shaped result under :addresses.
  Returns a promise of the row with decrypted content."
  [aes-key row]
  (let [[addr raw-content raw-addresses] row
        data (ldb/read-transit-str raw-content)
        addresses (when raw-addresses
                    (js/JSON.parse raw-addresses))]
    (if (map? data)
      ;; Map-shaped row: only the :keys datoms may hold encrypted values.
      ;; NOTE: renamed the local from `keys` (shadowed clojure.core/keys) and
      ;; removed the duplicate (:keys data) lookup.
      (p/let [ks (:keys data)
              ks' (if (seq ks)
                    (<decrypt-datoms aes-key ks)
                    ks)
              result (assoc data :keys ks')]
        [addr
         (ldb/write-transit-str (cond-> result
                                  (some? addresses)
                                  (assoc :addresses addresses)))
         raw-addresses])
      ;; Collection-shaped row: decrypt every datom collection.
      (p/let [result (p/all (map #(<decrypt-datoms aes-key %) data))]
        [addr (ldb/write-transit-str result) raw-addresses]))))
(defn <decrypt-snapshot-rows-batch
  "Decrypt every snapshot row in `rows-batch` (see `<decrypt-snapshot-row`).
  Returns a promise of the decrypted rows."
  [aes-key rows-batch]
  (->> rows-batch
       (map (fn [row] (<decrypt-snapshot-row aes-key row)))
       p/all))
(defn <encrypt-datoms
  "Encrypt the :v of every datom map in `datoms` whose :a is in
  `sync-const/encrypt-attr-set`; other datoms pass through unchanged.
  Returns a promise of the datom collection."
  [aes-key datoms]
  (p/all
   (map (fn [{:keys [a v] :as datom}]
          (if-not (contains? sync-const/encrypt-attr-set a)
            (p/resolved datom)
            (p/then (<encrypt-text-value aes-key v)
                    #(assoc datom :v %))))
        datoms)))
(defn- <re-encrypt-private-key
  "Re-encrypt the user's RSA private key under a new password: transit-decode
  the stored encrypted package, decrypt with `old-password`, encrypt with
  `new-password`, and transit-encode the result. Returns a promise of the new
  encrypted-private-key transit string."
  [encrypted-private-key-str old-password new-password]
  (p/let [old-package (ldb/read-transit-str encrypted-private-key-str)
          private-key (crypt/<decrypt-private-key old-password old-package)
          new-package (crypt/<encrypt-private-key new-password private-key)]
    (ldb/write-transit-str new-package)))
(defn <change-e2ee-password!
  "Rotate the user's e2ee password: fetch the stored RSA key pair, re-encrypt
  the private key under `new-password`, upload the updated key pair, then save
  the new password locally for `refresh-token`.
  Fails fast when the e2ee base URL or the remote key pair is missing.
  Returns a promise resolving to nil."
  [refresh-token user-uuid old-password new-password]
  (let [base (e2ee-base)]
    (when-not (string? base)
      (fail-fast :db-sync/missing-field {:base base :user-uuid user-uuid}))
    (p/let [{:keys [public-key encrypted-private-key]} (<fetch-user-rsa-key-pair-raw base)]
      ;; Both halves of the key pair must already exist remotely before a
      ;; password change makes sense.
      (when-not (and (string? public-key) (string? encrypted-private-key))
        (fail-fast :db-sync/missing-field {:base base :user-uuid user-uuid :field :user-rsa-key-pair}))
      ;; Order matters: upload the re-encrypted key first, only then persist
      ;; the new password locally.
      (p/let [encrypted-private-key' (<re-encrypt-private-key encrypted-private-key old-password new-password)
              _ (<upload-user-rsa-key-pair! base public-key encrypted-private-key')
              _ (<save-e2ee-password refresh-token new-password)]
        nil))))
;; Fetch the user's RSA key pair from the e2ee service. Resolves to
;; {:public-key ... :encrypted-private-key ...} when both halves exist,
;; otherwise nil. Fails fast when the e2ee base URL is missing.
(def-thread-api :thread-api/get-user-rsa-key-pair
  [_token _user-uuid]
  (let [base (e2ee-base)]
    (when-not (string? base)
      (fail-fast :db-sync/missing-field {:base base}))
    (p/let [{:keys [public-key encrypted-private-key]} (<fetch-user-rsa-key-pair-raw base)]
      (when (and public-key encrypted-private-key)
        {:public-key public-key
         :encrypted-private-key encrypted-private-key}))))
;; Initialize the user's RSA key pair if none exists remotely: generate a new
;; pair, ask the main thread for an e2ee password, encrypt the private key with
;; it, upload both halves, and persist the password locally. No-op when a
;; complete key pair is already stored. Resolves to nil.
(def-thread-api :thread-api/init-user-rsa-key-pair
  [_token refresh-token _user-uuid]
  (let [base (e2ee-base)]
    (when-not (string? base)
      (fail-fast :db-sync/missing-field {:base base}))
    (p/let [existing (<fetch-user-rsa-key-pair-raw base)]
      ;; Only generate when either half of the pair is missing.
      (when-not (and (string? (:public-key existing))
                     (string? (:encrypted-private-key existing)))
        (p/let [{:keys [publicKey privateKey]} (crypt/<generate-rsa-key-pair)
                ;; The password is supplied interactively by the main thread.
                {:keys [password]} (worker-state/<invoke-main-thread :thread-api/request-e2ee-password)
                encrypted-private-key (crypt/<encrypt-private-key password privateKey)
                exported-public-key (crypt/<export-public-key publicKey)
                public-key-str (ldb/write-transit-str exported-public-key)
                encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)
                _ (<upload-user-rsa-key-pair! base public-key-str encrypted-private-key-str)
                _ (<save-e2ee-password refresh-token password)]
          nil)))))
;; Unconditionally replace the user's RSA key pair: generate a fresh pair,
;; encrypt the private key with `new-password`, upload both halves, and persist
;; the password locally. Unlike init-user-rsa-key-pair this overwrites any
;; existing remote key pair. Resolves to nil.
(def-thread-api :thread-api/reset-user-rsa-key-pair
  [_token refresh-token _user-uuid new-password]
  (p/let [{:keys [publicKey privateKey]} (crypt/<generate-rsa-key-pair)
          encrypted-private-key (crypt/<encrypt-private-key new-password privateKey)
          exported-public-key (crypt/<export-public-key publicKey)
          public-key-str (ldb/write-transit-str exported-public-key)
          encrypted-private-key-str (ldb/write-transit-str encrypted-private-key)
          base (e2ee-base)]
    ;; NOTE(review): keys are generated before the base-URL check; harmless but
    ;; the fail-fast could happen first.
    (when-not (string? base)
      (fail-fast :db-sync/missing-field {:base base}))
    (p/let [_ (<upload-user-rsa-key-pair! base public-key-str encrypted-private-key-str)
            _ (<save-e2ee-password refresh-token new-password)]
      nil)))
;; Thin thread-api wrapper around <change-e2ee-password!.
(def-thread-api :thread-api/change-e2ee-password
  [_token refresh-token user-uuid old-password new-password]
  (<change-e2ee-password! refresh-token user-uuid old-password new-password))
;; Read the locally persisted e2ee password for `refresh-token`, resolving to
;; {:password ...}.
;; NOTE(review): on failure the p/catch RETURNS an ex-info as a value rather
;; than rethrowing, and drops the original cause `e` (it is only logged) —
;; presumably so the error crosses the worker boundary as data; confirm that
;; callers expect this shape.
(def-thread-api :thread-api/get-e2ee-password
  [refresh-token]
  (-> (p/let [password (<read-e2ee-password refresh-token)]
        {:password password})
      (p/catch (fn [e]
                 (log/error :read-e2ee-password e)
                 (ex-info ":thread-api/get-e2ee-password" {})))))
;; Persist the e2ee password locally for `refresh-token`.
(def-thread-api :thread-api/save-e2ee-password
  [refresh-token password]
  (<save-e2ee-password refresh-token password))

View File

@@ -0,0 +1,39 @@
(ns frontend.worker.sync.log-and-state
"Fns to generate rtc related logs"
(:require [frontend.worker.shared-service :as shared-service]
[logseq.common.defkeywords :refer [defkeywords]]
[malli.core :as ma]))
(def ^:private *rtc-log (atom nil))
(def ^:private rtc-log-type-schema
  "Malli :enum schema listing every valid rtc log type keyword."
  (vec
   (concat
    [:enum]
    ;; defkeywords returns [kw doc-map kw doc-map ...]; take-nth 2 keeps the
    ;; keywords only.
    (take-nth
     2
     (defkeywords
       :rtc.log/upload {:doc "rtc log type for upload-graph."}
       ;; Fixed copy-paste: this doc previously read "upload-graph".
       :rtc.log/download {:doc "rtc log type for download-graph."}
       :rtc.asset.log/upload-assets {:doc "upload local assets to remote"}
       :rtc.asset.log/download-assets {:doc "download assets from remote"}
       :rtc.asset.log/remove-assets {:doc "remove remote assets"}
       :rtc.asset.log/asset-too-large {:doc "asset is too large to upload"}
       :rtc.asset.log/initial-download-missing-assets {:doc "download assets if not exists in rtc-asset-sync initial phase"})))))
(def ^:private rtc-log-type-validator (ma/validator rtc-log-type-schema))
(defn rtc-log
  "Record an rtc log entry: merges `:type type` and a `:created-at` js/Date
  into map `m` and publishes it via the *rtc-log atom (whose watcher pushes it
  to frontend clients). `type` must satisfy rtc-log-type-validator.
  Returns nil."
  [type m]
  {:pre [(map? m) (rtc-log-type-validator type)]}
  (let [entry (assoc m :type type :created-at (js/Date.))]
    (reset! *rtc-log entry)
    nil))
;;; subscribe-logs, push to frontend
;;; TODO: refactor by using c.m/run-background-task
(defn- subscribe-logs
  ;; Re-register the watch idempotently: remove any previous watch under the
  ;; same key first so namespace reloads don't broadcast entries twice.
  []
  (remove-watch *rtc-log :subscribe-logs)
  (add-watch *rtc-log :subscribe-logs
             (fn [_ _ _ n] (when n (shared-service/broadcast-to-clients! :rtc-log n)))))
;; Load-time side effect: start broadcasting rtc log entries to clients.
(subscribe-logs)

View File

@@ -3,11 +3,11 @@
#?(:cljs (:require-macros [frontend.worker-common.util]))
#?(:cljs (:refer-clojure :exclude [format]))
#?(:cljs (:require [clojure.string :as string]
[goog.crypt :as crypt]
[goog.crypt.base64 :as base64]
[goog.crypt.Hmac]
[goog.crypt.Sha256]
[logseq.db.common.sqlite :as common-sqlite]
[logseq.db :as ldb])))
[logseq.db :as ldb]
[logseq.db.common.sqlite :as common-sqlite])))
;; Copied from https://github.com/tonsky/datascript-todo
#?(:clj
@@ -46,7 +46,7 @@
(some-> jwt
(string/split ".")
second
(#(.decodeString ^js crypt/base64 % true))
(#(base64/decodeString % true))
js/JSON.parse
(js->clj :keywordize-keys true)
(update :cognito:username decode-username)))))

View File

@@ -83,8 +83,6 @@
(def ^:export clear_right_sidebar_blocks api-app/clear_right_sidebar_blocks)
(def ^:export push_state api-app/push_state)
(def ^:export replace_state api-app/replace_state)
(def ^:export export_debug_log_db api-app/export_debug_log_db)
(def ^:export reset_debug_log_db api-app/reset_debug_log_db)
;; db
(def ^:export q api-db/q)

View File

@@ -8,7 +8,6 @@
[frontend.db.utils :as db-utils]
[frontend.handler.command-palette :as palette-handler]
[frontend.handler.config :as config-handler]
[frontend.handler.export :as export-handler]
[frontend.handler.plugin :as plugin-handler]
[frontend.handler.recent :as recent-handler]
[frontend.handler.route :as route-handler]
@@ -173,13 +172,3 @@
(if-let [page-name (and page? (:name params))]
(route-handler/redirect-to-page! page-name {:anchor (:anchor query) :push false})
(rfe/replace-state k params query)))))
(def export_debug_log_db
(fn []
(when-let [repo (state/get-current-repo)]
(export-handler/export-repo-as-debug-log-sqlite! repo))))
(def reset_debug_log_db
(fn []
(when-let [repo (state/get-current-repo)]
(state/<invoke-db-worker-direct-pass :thread-api/reset-debug-log-db repo))))

View File

@@ -1,6 +1,6 @@
(ns frontend.handler.db-based.db-sync-test
(ns frontend.handler.db-based.sync-test
(:require [cljs.test :refer [deftest is async]]
[frontend.handler.db-based.db-sync :as db-sync]
[frontend.handler.db-based.sync :as db-sync]
[frontend.handler.user :as user-handler]
[promesa.core :as p]))

View File

@@ -130,11 +130,12 @@
(state/set-current-repo! nil)
(destroy-test-db!))
(def start-and-destroy-db-map-fixture
"To avoid 'Fixtures may not be of mixed types' error
(comment
(def start-and-destroy-db-map-fixture
"To avoid 'Fixtures may not be of mixed types' error
when use together with other map-type fixtures"
{:before start-test-db!
:after #(destroy-test-db!)})
{:before start-test-db!
:after #(destroy-test-db!)}))
(defn save-block!
"Wrapper around editor-handler/save-block! that also adds tags"
@@ -155,4 +156,4 @@
(db-test/find-page-by-title (conn/get-db) page-title))
(defn find-block-by-content [block-title]
(db-test/find-block-by-content (conn/get-db) block-title))
(db-test/find-block-by-content (conn/get-db) block-title))

View File

@@ -3,10 +3,10 @@
[clojure.data :as data]
[clojure.string :as string]
[datascript.core :as d]
[frontend.worker.db-sync :as db-sync]
[frontend.worker.handler.page :as worker-page]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.state :as worker-state]
[frontend.worker.sync :as db-sync]
[frontend.worker.sync.client-op :as client-op]
[logseq.db :as ldb]
[logseq.db.common.normalize :as db-normalize]
[logseq.db.test.helper :as db-test]

View File

@@ -1,9 +1,9 @@
(ns frontend.worker.db-sync-test
(:require [cljs.test :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.db-sync :as db-sync]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.state :as worker-state]
[frontend.worker.sync :as db-sync]
[frontend.worker.sync.client-op :as client-op]
[logseq.db.test.helper :as db-test]
[logseq.outliner.core :as outliner-core]))

View File

@@ -1,36 +0,0 @@
(ns frontend.worker.fixtures
(:require [datascript.core :as d]
[frontend.db.conn :as conn]
[frontend.test.helper :as test-helper]
[frontend.worker.db-listener :as worker-db-listener]))
(defn listen-test-db-fixture
[handler-keys]
(fn [f]
(let [test-db-conn (conn/get-db test-helper/test-db false)]
(assert (some? test-db-conn))
(worker-db-listener/listen-db-changes! test-helper/test-db test-db-conn
{:handler-keys handler-keys})
(f)
(d/unlisten! test-db-conn :frontend.worker.db-listener/listen-db-changes!))))
(comment
(def ^:private *tx-log-name-index (atom 0))
(defn listen-test-db-to-write-tx-log-json-file
"Write {:tx-log <tx-data-coll> :init-db <init-db>} to file 'tx-log-<index>.json'"
[f]
(let [test-db-conn (conn/get-db test-helper/test-db false)
init-db @test-db-conn
*tx-log (atom [])]
(d/listen! test-db-conn :collect-tx-data
(fn [{:keys [tx-data]}]
(swap! *tx-log conj tx-data)))
(try
(f)
(finally
(let [file-name (str "tx-log-" @*tx-log-name-index ".json")]
(println "saving " file-name " ...")
(fs-node/writeFileSync file-name (sqlite-util/write-transit-str {:tx-log @*tx-log :init-db init-db}))
(swap! *tx-log-name-index inc))))
(d/unlisten! test-db-conn :collect-tx-data))))

View File

@@ -1,249 +0,0 @@
(ns frontend.worker.rtc.client-op-test
(:require [cljs.test :as t :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.state :as worker-state]))
(deftest merge-update-ops-test
(testing "older op added after newer op should be merged in correct order"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
op1 [:update 1 {:block-uuid block-uuid
:av-coll [[:block/title "A" 1 true]]}]
op2 [:update 2 {:block-uuid block-uuid
:av-coll [[:block/title "B" 2 true]]}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
;; 1. Add newer op first
(client-op/add-ops! "repo" [op2])
;; 2. Add older op later
(client-op/add-ops! "repo" [op1])
;; 3. Verify merged result
(let [ops (client-op/get&remove-all-block-ops "repo")
merged-op (first ops)]
(is (= 1 (count ops)))
(is (= :update (first (:update merged-op))))
(is (= 2 (second (:update merged-op)))) ;; Should keep the max t
(let [av-coll (:av-coll (last (:update merged-op)))]
(is (= 2 (count av-coll)))
;; The av-coll should contain both, but order matters for semantic correctness.
;; Logic in merge-update-ops:
;; (if (> t1 t2) (merge-update-ops op2 op1) ...)
;; so it recursively ensures older is merged into newer (or concatenated properly).
;; effectively: (concat av-coll-of-older av-coll-of-newer)
(is (= [[:block/title "A" 1 true]
[:block/title "B" 2 true]]
av-coll))))))))
(deftest remove-op-wins-over-older-move-op-test
(testing "older move-op added after newer remove-op should NOT resurrect the block"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
move-op [:move 1 {:block-uuid block-uuid}]
remove-op [:remove 2 {:block-uuid block-uuid}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
;; 1. Add newer remove-op first
(client-op/add-ops! "repo" [remove-op])
;; 2. Add older move-op later (simulating rollback)
(client-op/add-ops! "repo" [move-op])
;; 3. Verify result
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
;; Expectation: The remove-op (t=2) should win over move-op (t=1).
;; The block should remain removed.
(is (some? (:remove result-op)))
(is (nil? (:move result-op)))
(is (= 2 (second (:remove result-op)))))))))
(deftest move-op-and-remove-op-exclusivity-test
(testing "move-op and remove-op should never coexist"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(testing "Scenario 1: Newer Move after Older Remove -> Move wins"
(let [remove-op [:remove 1 {:block-uuid block-uuid}]
move-op [:move 2 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [remove-op])
(client-op/add-ops! "repo" [move-op])
(let [ops (client-op/get-all-block-ops "repo")
;; ops = '([:move 2 {:block-uuid #uuid "461222f8-d43a-4726-9063-3a14b46d26b8"}])
result-op (first ops)]
(is (= :move (first result-op)))
(is (= 2 (second result-op)))
;; Clean up
(client-op/get&remove-all-block-ops "repo"))))
(testing "Scenario 2: Newer Remove after Older Move -> Remove wins"
(let [move-op [:move 1 {:block-uuid block-uuid}]
remove-op [:remove 2 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [move-op])
(client-op/add-ops! "repo" [remove-op])
(let [ops (client-op/get-all-block-ops "repo")
result-op (first ops)]
(is (= :remove (first result-op)))
(is (= 2 (second result-op)))
;; Clean up
(client-op/get&remove-all-block-ops "repo"))))))))
(deftest ops-combinations-test
(testing "Various combinations of ops on the same block"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(testing "Duplicate Ops: Add same op twice"
(let [move-op [:move 1 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [move-op])
(client-op/add-ops! "repo" [move-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:move result-op)))
(is (= 1 (second (:move result-op)))))))
(testing "Move then Update (older move, newer update) -> Both present"
(let [move-op [:move 1 {:block-uuid block-uuid}]
update-op [:update 2 {:block-uuid block-uuid :av-coll []}]]
(client-op/add-ops! "repo" [move-op])
(client-op/add-ops! "repo" [update-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:move result-op)))
(is (= 1 (second (:move result-op))))
(is (some? (:update result-op)))
(is (= 2 (second (:update result-op)))))))
(testing "Update then Move (older update, newer move) -> Both present"
(let [update-op [:update 1 {:block-uuid block-uuid :av-coll []}]
move-op [:move 2 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [update-op])
(client-op/add-ops! "repo" [move-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:update result-op)))
(is (= 1 (second (:update result-op))))
(is (some? (:move result-op)))
(is (= 2 (second (:move result-op)))))))
(testing "Update then Remove (Newer Remove) -> Remove wins"
(let [update-op [:update 1 {:block-uuid block-uuid :av-coll []}]
remove-op [:remove 2 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [update-op])
(client-op/add-ops! "repo" [remove-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:remove result-op)))
(is (nil? (:update result-op))))))
(testing "Remove then Update (Newer Update) -> Update wins"
(let [remove-op [:remove 1 {:block-uuid block-uuid}]
update-op [:update 2 {:block-uuid block-uuid :av-coll []}]]
(client-op/add-ops! "repo" [remove-op])
(client-op/add-ops! "repo" [update-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:update result-op)))
(is (nil? (:remove result-op))))))
(testing "Move(t1) -> Remove(t2) -> Update(t3) -> Update wins, Move stays retracted"
(let [move-op [:move 1 {:block-uuid block-uuid}]
remove-op [:remove 2 {:block-uuid block-uuid}]
update-op [:update 3 {:block-uuid block-uuid :av-coll []}]]
(client-op/add-ops! "repo" [move-op])
(client-op/add-ops! "repo" [remove-op])
(client-op/add-ops! "repo" [update-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:update result-op)))
(is (= 3 (second (:update result-op))))
(is (nil? (:remove result-op)))
(is (nil? (:move result-op))))))
(testing "Remove(t1) -> Update(t2) -> Move(t3) -> Update+Move wins"
(let [remove-op [:remove 1 {:block-uuid block-uuid}]
update-op [:update 2 {:block-uuid block-uuid :av-coll []}]
move-op [:move 3 {:block-uuid block-uuid}]]
(client-op/add-ops! "repo" [remove-op])
(client-op/add-ops! "repo" [update-op])
(client-op/add-ops! "repo" [move-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:update result-op)))
(is (= 2 (second (:update result-op))))
(is (some? (:move result-op)))
(is (= 3 (second (:move result-op))))
(is (nil? (:remove result-op))))))))))
(deftest add-op-merge-test
(testing "Merge :add and :move"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
add-op [:add 1 {:block-uuid block-uuid :av-coll []}]
move-op [:move 2 {:block-uuid block-uuid}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(client-op/add-ops! "repo" [add-op])
(client-op/add-ops! "repo" [move-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:add result-op)))
(is (= 2 (second (:add result-op))))
(is (nil? (:move result-op)))))))
(testing "Merge :add and :update"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
add-op [:add 1 {:block-uuid block-uuid :av-coll [[:block/title "A" 1 true]]}]
update-op [:update 2 {:block-uuid block-uuid :av-coll [[:block/title "B" 2 true]]}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(client-op/add-ops! "repo" [add-op])
(client-op/add-ops! "repo" [update-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:add result-op)))
(is (= 2 (second (:add result-op))))
(let [av-coll (:av-coll (last (:add result-op)))]
(is (= [[:block/title "A" 1 true] [:block/title "B" 2 true]] av-coll)))
(is (nil? (:update result-op)))))))
(testing "Merge :add and :remove (Newer Remove)"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
add-op [:add 1 {:block-uuid block-uuid :av-coll []}]
remove-op [:remove 2 {:block-uuid block-uuid}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(client-op/add-ops! "repo" [add-op])
(client-op/add-ops! "repo" [remove-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:remove result-op)))
(is (nil? (:add result-op)))))))
(testing "Merge :remove and :add (Newer Add)"
(let [conn (d/create-conn client-op/schema-in-db)
block-uuid (random-uuid)
remove-op [:remove 1 {:block-uuid block-uuid}]
add-op [:add 2 {:block-uuid block-uuid :av-coll []}]]
(with-redefs [worker-state/get-client-ops-conn (constantly conn)]
(client-op/add-ops! "repo" [remove-op])
(client-op/add-ops! "repo" [add-op])
(let [ops (client-op/get&remove-all-block-ops "repo")
result-op (first ops)]
(is (= 1 (count ops)))
(is (some? (:add result-op)))
(is (nil? (:remove result-op))))))))

View File

@@ -1,124 +0,0 @@
(ns frontend.worker.rtc.client-test
(:require
[cljs.test :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.rtc.client :as subject]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]))
(def empty-db (d/empty-db db-schema/schema))
(deftest ^:large-vars/cleanup-todo local-block-ops->remote-ops-test
(testing "user.class/yyy creation"
(let [block-uuid (random-uuid)
db (d/db-with empty-db [{:block/uuid block-uuid,
:block/updated-at 1720017595873,
:block/created-at 1720017595872,
:db/ident :user.class/yyy,
:block/name "yyy",
:block/title "yyy"}])]
(is (= {:move
{:block-uuid block-uuid
:pos [nil nil]}
:update
{:block-uuid block-uuid
:db/ident :user.class/yyy
:pos [nil nil],
:av-coll
[[:block/name "[\"~#'\",\"yyy\"]" 1 true]
[:block/title "[\"~#'\",\"yyy\"]" 1 true]]}}
(:remote-ops
(#'subject/local-block-ops->remote-ops
db
{:move [:move 1 {:block-uuid block-uuid}]
:update
[:update 1 {:block-uuid block-uuid
:av-coll
[[:block/name (ldb/write-transit-str "yyy") 1 true]
[:block/title (ldb/write-transit-str "yyy") 1 true]]}]}))))))
(testing "user.property/xxx creation"
(let [block-uuid (random-uuid)
block-order "b0P"
db (d/db-with empty-db [{:block/uuid #uuid "00000002-5389-0208-3000-000000000000",
:block/updated-at 1741424828774,
:block/created-at 1741424828774,
:logseq.property/built-in? true,
:block/tags [2],
:block/title "Tag",
:db/id 2,
:db/ident :logseq.class/Tag,
:block/name "tag"}
{:block/uuid #uuid "00000002-1038-7670-4800-000000000000",
:block/updated-at 1741424828774,
:block/created-at 1741424828774,
:logseq.property/built-in? true,
:block/tags [2]
:block/title "Property",
:db/id 3,
:db/ident :logseq.class/Property,
:block/name "property"}
{:db/index true
:block/uuid block-uuid
:db/valueType :db.type/ref
:block/updated-at 1716880036491
:block/created-at 1716880036491
:logseq.property/type :number
:db/cardinality :db.cardinality/one
:db/ident :user.property/xxx,
:block/tags [3]
:block/order block-order,
:block/name "xxx",
:block/title "xxx"}])]
(is (=
{:move
{:block-uuid block-uuid
:pos [nil block-order]}
:update
{:block-uuid block-uuid,
:db/ident :user.property/xxx
:pos [nil block-order],
:av-coll
[[:block/name "[\"~#'\",\"xxx\"]" 1 true]
[:block/title "[\"~#'\",\"xxx\"]" 1 true]]}
:update-schema
{:block-uuid block-uuid
:db/ident :user.property/xxx,
:db/cardinality :db.cardinality/one,
:db/valueType :db.type/ref,
:db/index true}}
(:remote-ops
(#'subject/local-block-ops->remote-ops
db
{:move [:move 1 {:block-uuid block-uuid}]
:update
[:update 1 {:block-uuid block-uuid
:av-coll
[[:db/valueType (ldb/write-transit-str :db.type/ref) 1 true]
[:block/name (ldb/write-transit-str "xxx") 1 true]
[:block/title (ldb/write-transit-str "xxx") 1 true]
[:db/cardinality (ldb/write-transit-str :db.cardinality/one) 1 true]
[:db/index (ldb/write-transit-str true) 1 true]]}]}))))))
(testing "user.class/zzz creation (add op)"
(let [block-uuid (random-uuid)
db (d/db-with empty-db [{:block/uuid block-uuid,
:block/updated-at 1720017595873,
:block/created-at 1720017595872,
:db/ident :user.class/zzz,
:block/name "zzz",
:block/title "zzz"}])]
(is (= {:add
{:block-uuid block-uuid
:db/ident :user.class/zzz
:pos [nil nil]
:av-coll
[[:block/name "[\"~#'\",\"zzz\"]" 1 true]
[:block/title "[\"~#'\",\"zzz\"]" 1 true]]}}
(:remote-ops
(#'subject/local-block-ops->remote-ops
db
{:add [:add 1 {:block-uuid block-uuid
:av-coll
[[:block/name (ldb/write-transit-str "zzz") 1 true]
[:block/title (ldb/write-transit-str "zzz") 1 true]]}]})))))))

View File

@@ -1,21 +0,0 @@
(ns frontend.worker.rtc.fixture
(:require [datascript.core :as d]
[frontend.db.conn :as conn]
[frontend.test.helper :as test-helper]
[frontend.worker.db-listener :as worker-db-listener]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.db-listener]
[frontend.worker.state :as worker-state]))
(def listen-test-db-to-gen-rtc-ops-fixture
{:before
#(let [test-db-conn (conn/get-db test-helper/test-db false)]
(assert (some? test-db-conn))
(worker-db-listener/listen-db-changes! test-helper/test-db test-db-conn
{:handler-keys [:gen-rtc-ops]})
(swap! worker-state/*client-ops-conns
assoc test-helper/test-db (d/create-conn client-op/schema-in-db)))
:after
#(when-let [test-db-conn (conn/get-db test-helper/test-db false)]
(d/unlisten! test-db-conn :frontend.worker.db-listener/listen-db-changes!)
(swap! worker-state/*client-ops-conns dissoc test-helper/test-db))})

View File

@@ -1,258 +0,0 @@
(ns frontend.worker.rtc.gen-client-op-test
(:require [cljs.test :as t :refer [deftest is testing]]
[clojure.set :as set]
[datascript.core :as d]
[frontend.db.conn :as conn]
[frontend.state :as state]
[frontend.test.helper :as test-helper]
[frontend.worker.handler.page :as worker-page]
[frontend.worker.rtc.client-op :as client-op]
[frontend.worker.rtc.fixture :as r.fixture]
[frontend.worker.rtc.gen-client-op :as subject]
[logseq.db.test.helper :as db-test]
[logseq.outliner.batch-tx :as batch-tx]
[logseq.outliner.core :as outliner-core]
[meander.epsilon :as me]))
(t/use-fixtures :each
test-helper/start-and-destroy-db-map-fixture
r.fixture/listen-test-db-to-gen-rtc-ops-fixture)
(defn- tx-data=>e->a->add?->v->t
[tx-data]
(let [datom-vec-coll (map vec tx-data)
id->same-entity-datoms (group-by first datom-vec-coll)]
(update-vals id->same-entity-datoms #'subject/entity-datoms=>a->add?->v->t)))
(deftest ^:large-vars/cleanup-todo entity-datoms=>ops-test
(testing "remove whiteboard page-block"
(let [conn (db-test/create-conn)
block-uuid (random-uuid)
_create-whiteboard-page-block
(d/transact! conn [{:block/uuid block-uuid
:block/tags :logseq.class/Whiteboard
:block/name "block-name"
:block/title "BLOCK-NAME"}])
remove-whiteboard-page-block
(d/transact! conn [[:db/retractEntity [:block/uuid block-uuid]]])
r (#'subject/entity-datoms=>ops (:db-before remove-whiteboard-page-block)
(:db-after remove-whiteboard-page-block)
(tx-data=>e->a->add?->v->t (:tx-data remove-whiteboard-page-block))
nil
(map vec (:tx-data remove-whiteboard-page-block)))]
(is (= [[:remove-page {:block-uuid block-uuid}]]
(map (fn [[op-type _t op-value]] [op-type op-value]) r)))))
(testing "create page block"
(let [conn (db-test/create-conn)
block-uuid (random-uuid)
tx-data [[:db/add 1000000 :block/uuid block-uuid]
[:db/add 1000000 :block/name "page-name"]
[:db/add 1000000 :block/title "Page Title"]
[:db/add 1000000 :block/created-at 1716882111476]
[:db/add 1000000 :block/updated-at 1716882111476]]
{:keys [db-before db-after tx-data]} (d/transact! conn tx-data)
ops (#'subject/entity-datoms=>ops db-before db-after
(tx-data=>e->a->add?->v->t tx-data)
nil
(map vec tx-data))]
(is (= [[:update-page {:block-uuid block-uuid}]
[:add {:block-uuid block-uuid
:av-coll
(set [[:block/updated-at "[\"~#'\",1716882111476]"]
[:block/created-at "[\"~#'\",1716882111476]"]
[:block/title "[\"~#'\",\"Page Title\"]"]])}]]
(map (fn [[op-type _t op-value]]
[op-type (cond-> op-value
(:av-coll op-value)
(assoc :av-coll (set (map #(take 2 %) (:av-coll op-value)))))])
ops)))))
(testing "update-schema op"
(let [conn (db-test/create-conn)
tx-data [[:db/add 1000000 :db/index true]
[:db/add 1000000 :block/uuid #uuid "66558abf-6512-469d-9e83-8f1ba0be9305"]
[:db/add 1000000 :db/valueType :db.type/ref]
[:db/add 1000000 :block/updated-at 1716882111476]
[:db/add 1000000 :block/created-at 1716882111476]
[:db/add 1000000 :logseq.property/type :number]
[:db/add 1000000 :db/cardinality :db.cardinality/one]
[:db/add 1000000 :db/ident :user.property/qqq]
[:db/add 1000000 :block/tags :logseq.class/Property]
[:db/add 1000000 :block/order "b0T"]
[:db/add 1000000 :block/name "qqq"]
[:db/add 1000000 :block/title "qqq"]
[:db/add 1000000 :logseq.property/ignored-attr-x "111"]]
{:keys [db-before db-after tx-data]} (d/transact! conn tx-data)
ops (#'subject/entity-datoms=>ops db-before db-after
(tx-data=>e->a->add?->v->t tx-data)
#{:logseq.property/ignored-attr-x}
(map vec tx-data))]
(is (=
[[:update-page {:block-uuid #uuid "66558abf-6512-469d-9e83-8f1ba0be9305"}]
[:add {:block-uuid #uuid "66558abf-6512-469d-9e83-8f1ba0be9305"
:av-coll
[[:db/index "[\"~#'\",true]"]
[:logseq.property/type "[\"~#'\",\"~:number\"]"]
[:db/valueType "[\"~#'\",\"~:db.type/ref\"]"]
[:block/updated-at "[\"~#'\",1716882111476]"]
[:block/created-at "[\"~#'\",1716882111476]"]
[:block/tags #uuid "00000002-1038-7670-4800-000000000000"]
[:block/title "[\"~#'\",\"qqq\"]"]
[:db/cardinality "[\"~#'\",\"~:db.cardinality/one\"]"]
;; [:db/ident "[\"~#'\",\"~:user.property/qqq\"]"]
]}]]
(map (fn [[op-type _t op-value]]
[op-type (cond-> op-value
(:av-coll op-value)
(assoc :av-coll (map #(take 2 %) (:av-coll op-value))))])
ops)))))
(testing "create user-class"
(let [conn (db-test/create-conn)
tx-data [[:db/add 1000000 :block/uuid #uuid "66856a29-6eb3-4122-af97-8580a853c6a6" 536870954]
[:db/add 1000000 :block/updated-at 1720019497643 536870954]
[:db/add 1000000 :logseq.property.class/extends :logseq.class/Root 536870954]
[:db/add 1000000 :block/created-at 1720019497643 536870954]
[:db/add 1000000 :db/ident :user.class/zzz 536870954]
[:db/add 1000000 :block/tags :logseq.class/Tag 536870954]
[:db/add 1000000 :block/name "zzz" 536870954]
[:db/add 1000000 :block/title "zzz" 536870954]]
{:keys [db-before db-after tx-data]} (d/transact! conn tx-data)
ops (#'subject/entity-datoms=>ops db-before db-after
(tx-data=>e->a->add?->v->t tx-data)
nil
(map vec tx-data))]
(is (=
[[:update-page {:block-uuid #uuid "66856a29-6eb3-4122-af97-8580a853c6a6"}]
[:add {:block-uuid #uuid "66856a29-6eb3-4122-af97-8580a853c6a6",
:av-coll
(set
[[:block/updated-at "[\"~#'\",1720019497643]"]
[:block/created-at "[\"~#'\",1720019497643]"]
[:block/tags #uuid "00000002-5389-0208-3000-000000000000"]
[:block/title "[\"~#'\",\"zzz\"]"]
[:logseq.property.class/extends #uuid "00000002-2737-8382-7000-000000000000"]
;;1. shouldn't have :db/ident, :db/ident is special, will be handled later
])}]]
(map (fn [[op-type _t op-value]]
[op-type (cond-> op-value
(:av-coll op-value)
(assoc :av-coll (set (map #(take 2 %) (:av-coll op-value)))))])
ops))))))
(deftest listen-db-changes-and-validate-generated-rtc-ops
(letfn [(ops-coll=>block-uuid->op-types [ops-coll]
(into {}
(map (fn [m]
[(:block/uuid m) (set (keys (dissoc m :block/uuid)))]))
ops-coll))]
(let [repo (state/get-current-repo)
conn (conn/get-db repo false)
[page-uuid block-uuid1 block-uuid2] (repeatedly random-uuid)]
(testing "add page"
(worker-page/create! conn "TEST-PAGE" {:uuid page-uuid})
(is (some? (d/pull @conn '[*] [:block/uuid page-uuid])))
(is (= {page-uuid #{:add :update-page}}
(ops-coll=>block-uuid->op-types (client-op/get&remove-all-block-ops repo)))))
(testing "add blocks to this page"
(let [target-entity (d/entity @conn [:block/uuid page-uuid])]
(batch-tx/with-batch-tx-mode conn
{:persist-op? true}
(outliner-core/insert-blocks! conn [{:block/uuid block-uuid1
:block/title "block1"}
{:block/uuid block-uuid2
:block/title "block2"}]
target-entity
{:sibling? false :keep-uuid? true}))
(is (=
{block-uuid1 #{:add}
block-uuid2 #{:add}}
(ops-coll=>block-uuid->op-types (client-op/get&remove-all-block-ops repo))))))
(testing "delete a block"
(batch-tx/with-batch-tx-mode conn
{:persist-op? true}
(outliner-core/delete-blocks! conn [(d/entity @conn [:block/uuid block-uuid1])] {}))
(is (=
{block-uuid1 #{:remove}}
(ops-coll=>block-uuid->op-types (client-op/get&remove-all-block-ops repo))))))))
;; generate-rtc-ops-from-property-entities should emit an :update-page op
;; followed by an :add op whose :av-coll covers all the expected property
;; attributes. Uses a meander pattern to collect the attrs actually emitted.
(deftest generate-rtc-ops-from-property-entity-test
  (let [repo (state/get-current-repo)
        db (conn/get-db repo true)
        ent (d/entity db :logseq.property.view/feature-type)
        ;; attrs expected to appear in the generated :av-coll
        av-coll-attrs #{:logseq.property/type :logseq.property/built-in?
                        :logseq.property/public? :logseq.property/hide?
                        :block/tags :block/title :db/cardinality}]
    #_{:clj-kondo/ignore [:unresolved-symbol :invalid-arity]}
    (is (->> (me/find (subject/generate-rtc-ops-from-property-entities [ent])
                      ([:update-page . _ ...] [:add _ {:block-uuid ?block-uuid :av-coll ([!av-coll-attrs . _ ...] ...)}])
                      !av-coll-attrs)
             set
             ;; empty difference <=> every expected attr was emitted
             (set/difference av-coll-attrs)
             empty?))))
;; generate-rtc-ops-from-class-entities should emit an :update-page op
;; followed by an :add op whose :av-coll covers all the expected class
;; attributes. Mirrors generate-rtc-ops-from-property-entity-test above.
(deftest generate-rtc-ops-from-class-entity-test
  (let [repo (state/get-current-repo)
        db (conn/get-db repo true)
        ent (d/entity db :logseq.class/Template)
        ;; attrs expected to appear in the generated :av-coll
        av-coll-attrs #{:logseq.property.class/properties :logseq.property/built-in? :logseq.property.class/extends
                        :block/tags :block/title}]
    #_{:clj-kondo/ignore [:unresolved-symbol :invalid-arity]}
    (is (->> (me/find (subject/generate-rtc-ops-from-class-entities [ent])
                      ([:update-page . _ ...] [:add _ {:block-uuid ?block-uuid :av-coll ([!av-coll-attrs . _ ...] ...)}])
                      !av-coll-attrs)
             set
             ;; empty difference <=> every expected attr was emitted
             (set/difference av-coll-attrs)
             empty?))))
;; Tests for subject/remove-conflict-same-block-datoms: when several entity
;; ids claim the same :block/uuid, exactly one entity-datoms group per uuid
;; survives, input order of non-conflicting groups is preserved, and an
;; added group beats a retracted one.
(deftest remove-conflict-same-block-datoms-test
  (testing "remove conflict entity-datoms for same-block"
    (let [shared-uuid #uuid "693ec519-e73e-4f2c-b517-7e75ca2c64da"
          ;; entity 182: every datom retracted (added? = false)
          retracted-entity-datoms [[182 :logseq.property/created-by-ref 161 536870976 false]
                                   [182 :block/created-at 1765721369994 536870976 false]
                                   [182 :block/parent 162 536870976 false]
                                   [182 :block/order "aF" 536870976 false]
                                   [182 :block/tx-id 536870972 536870976 false]
                                   [182 :block/page 162 536870976 false]
                                   [182 :block/uuid shared-uuid 536870976 false]
                                   [182 :block/title "" 536870976 false]
                                   [182 :block/updated-at 1765721369994 536870976 false]]
          ;; entity 185: every datom added (added? = true) -- should win
          added-entity-datoms [[185 :block/parent 162 536870976 true]
                               [185 :logseq.property/created-by-ref 161 536870976 true]
                               [185 :block/title "111" 536870976 true]
                               [185 :logseq.property.embedding/hnsw-label-updated-at 0 536870976 true]
                               [185 :block/order "aG" 536870976 true]
                               [185 :block/page 162 536870976 true]
                               [185 :block/created-at 1765721370449 536870976 true]
                               [185 :block/updated-at 1765721370449 536870976 true]
                               [185 :block/uuid shared-uuid 536870976 true]
                               [185 :block/tx-id 536870976 536870977 true]]
          result (subject/remove-conflict-same-block-datoms
                  [retracted-entity-datoms added-entity-datoms])]
      (is (= 1 (count result)))
      (is (= 185 (nth (ffirst result) 0)))
      (is (= added-entity-datoms (first result)))))
  (testing "remove conflict entity-datoms should preserve order"
    (let [uuid-a #uuid "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
          uuid-b #uuid "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
          entity-100-datoms [[100 :block/uuid uuid-a 1 true]]
          entity-101-datoms [[101 :block/uuid uuid-b 2 true]]
          ;; conflicts with entity 101 over uuid-b; wins (higher entity id)
          entity-102-datoms [[102 :block/uuid uuid-b 2 true]]
          result (subject/remove-conflict-same-block-datoms
                  [entity-100-datoms entity-101-datoms entity-102-datoms])]
      (is (= 2 (count result)))
      (is (= entity-100-datoms (first result)))
      (is (= entity-102-datoms (second result)))))
  (testing "remove conflict entity-datoms should prefer add over retract"
    (let [uuid-a #uuid "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
          added-datoms [[100 :block/uuid uuid-a 1 true]]
          retracted-datoms [[101 :block/uuid uuid-a 1 false]]
          result (subject/remove-conflict-same-block-datoms
                  [added-datoms retracted-datoms])]
      (is (= 1 (count result)))
      (is (= added-datoms (first result))))))

View File

@@ -1,59 +0,0 @@
(ns frontend.worker.rtc.migrate-test
(:require ["fs" :as fs-node]
;; [cljs.pprint :as pp]
[cljs.test :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.db.migrate :as db-migrate]
[frontend.worker.rtc.migrate :as rtc-migrate]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]))
(defn- get-specific-result
  "Return the first upgrade-result whose tx-data sets :kv/value to `version`
  (versions compared after db-schema/parse-schema-version). Nil otherwise."
  [upgrade-result-coll version]
  (let [target-version (db-schema/parse-schema-version version)
        sets-target-version? (fn [datom]
                               (and (= :kv/value (:a datom))
                                    (= target-version
                                       (db-schema/parse-schema-version (:v datom)))))]
    (->> upgrade-result-coll
         (filter (fn [upgrade-result]
                   (some sets-target-version? (:tx-data upgrade-result))))
         first)))
;; Migrates a fixture db (65.2 -> 65.11) and checks the generated client-ops:
;; the final op must bump :logseq.kv/schema-version, and every block touched
;; by the 65.10 upgrade tx must be covered by a client-op.
(deftest migration-results=>client-ops
  (testing "65.2 => 65.11"
    (let [db-transit (str (fs-node/readFileSync "src/test/migration/65.2.transit"))
          db (ldb/read-transit-str db-transit)
          conn (d/conn-from-db db)
          migration-result (db-migrate/migrate conn {:target-version "65.11"})
          client-ops (rtc-migrate/migration-results=>client-ops migration-result)]
      ;; (prn :migration-result "================================================================")
      ;; (pp/pprint (merge (select-keys migration-result [:from-version :to-version])
      ;;                   {:upgrade-result-coll
      ;;                    (map (fn [r] [(:tx-data r) (select-keys (:migrate-updates r) [:rename-db-idents])])
      ;;                         (:upgrade-result-coll migration-result))}))
      ;; (prn :client-ops "================================================================")
      ;; (pp/pprint client-ops)
      (testing "check schema-version"
        (let [last-op (last client-ops)
              schema-version-update? (= :update-kv-value (first last-op))]
          (is schema-version-update? "The last op should be to update schema version")
          (when schema-version-update?
            (is (= :logseq.kv/schema-version (get-in last-op [2 :db-ident])) "The schema version key should be correct")
            (is (= (:to-version migration-result) (get-in last-op [2 :value])) "The schema version should be updated to the new version"))))
      (testing "check 65.10"
        (let [upgrade-result-65-10 (get-specific-result (:upgrade-result-coll migration-result) "65.10")
              {:keys [tx-data db-after]} upgrade-result-65-10]
          (is (some? upgrade-result-65-10))
          ;; collect block-uuids of entities added in the 65.10 tx, and the
          ;; block-uuids mentioned by client-ops carrying that tx-id
          (let [tx-id-65-10 (:tx (first tx-data))
                ents (map (partial d/entity db-after)
                          (set (keep (fn [datom] (when (:added datom) (:e datom))) tx-data)))
                block-uuids-in-tx-data (set (keep :block/uuid ents))
                block-uuids-in-client-ops (set
                                           (keep
                                            (fn [[op tx-id value]]
                                              (when (and (= tx-id tx-id-65-10)
                                                         (contains? #{:update :update-page :move} op))
                                                (:block-uuid value)))
                                            client-ops))]
            (is (= block-uuids-in-tx-data block-uuids-in-client-ops))))))))

View File

@@ -1,69 +0,0 @@
(ns frontend.worker.rtc.remote-update-test
(:require [cljs.test :as t :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.rtc.remote-update :as subject]
[logseq.db :as ldb]
[logseq.db.frontend.schema :as db-schema]
[logseq.db.sqlite.create-graph :as sqlite-create-graph]))
;; Unit tests for the private remote-op-value->tx-data helper: it diffs a
;; remote op-value map against the current local entity state and returns
;; datascript tx-data (adds for remote values, retracts for local values the
;; remote op no longer carries).
(deftest remote-op-value->tx-data-test
  (let [[block-uuid ref-uuid1 ref-uuid2] (repeatedly random-uuid)
        db (d/db-with (d/empty-db db-schema/schema)
                      (sqlite-create-graph/build-db-initial-data "{}" {}))]
    (testing ":block/title"
      (let [db (d/db-with db [{:block/uuid block-uuid
                               :block/title "local-content"}])
            ;; remote string values arrive transit-encoded
            op-value {:block/title (ldb/write-transit-str "remote-content")}]
        (is (= [[:db/add (:db/id (d/entity db [:block/uuid block-uuid])) :block/title "remote-content"]]
               (#'subject/remote-op-value->tx-data db (d/entity db [:block/uuid block-uuid]) op-value nil)))))
    (testing ":block/tags (1)"
      ;; remote refs are uuids; both resolve locally and get added as tags
      (let [db (d/db-with db [{:block/uuid block-uuid}
                              {:block/uuid ref-uuid1}
                              {:block/uuid ref-uuid2}])
            op-value {:block/tags [ref-uuid1 ref-uuid2]}
            [db-id ref1 ref2] (map :db/id (d/pull-many db [:db/id] [[:block/uuid block-uuid]
                                                                    [:block/uuid ref-uuid1]
                                                                    [:block/uuid ref-uuid2]]))]
        (is (= #{[:db/add db-id :block/tags ref1] [:db/add db-id :block/tags ref2]}
               (set (#'subject/remote-op-value->tx-data db (d/entity db [:block/uuid block-uuid]) op-value nil))))))
    (testing ":block/tags (2)"
      ;; local tag absent from the remote op is retracted; new remote tag added
      (let [db (d/db-with db [{:db/id "ref1"
                               :block/uuid ref-uuid1}
                              {:block/uuid ref-uuid2}
                              {:block/uuid block-uuid
                               :block/tags ["ref1"]}])
            op-value {:block/tags [ref-uuid2]}
            [db-id ref2] (map :db/id (d/pull-many db [:db/id] [[:block/uuid block-uuid]
                                                               [:block/uuid ref-uuid2]]))]
        (is (= #{[:db/retract db-id :block/tags [:block/uuid ref-uuid1]]
                 [:db/add db-id :block/tags ref2]}
               (set (#'subject/remote-op-value->tx-data db (d/entity db [:block/uuid block-uuid]) op-value nil))))))
    (testing ":block/tags (3): ref2 not exist"
      ;; a remote ref with no local entity is skipped; only the retract remains
      (let [db (d/db-with db [{:db/id "ref1"
                               :block/uuid ref-uuid1}
                              {:block/uuid block-uuid
                               :block/tags ["ref1"]}])
            op-value {:block/tags [ref-uuid2]}]
        (is (= #{[:db/retract (:db/id (d/entity db [:block/uuid block-uuid])) :block/tags [:block/uuid ref-uuid1]]}
               (set (#'subject/remote-op-value->tx-data db (d/entity db [:block/uuid block-uuid]) op-value nil))))))
    (testing ":logseq.property/status, op-value don't have this attr, means remove this attr"
      (let [db (d/db-with db [{:db/id "ref1"
                               :block/uuid ref-uuid1}
                              {:block/uuid block-uuid
                               :logseq.property/status "ref1"}])
            op-value {}
            ent (d/entity db [:block/uuid block-uuid])]
        (is (= [[:db/retract (:db/id ent) :logseq.property/status]]
               (#'subject/remote-op-value->tx-data db ent op-value nil)))))
    (testing "dont update ignored attrs"
      ;; attrs in ignore-attr-set are left untouched (no retract emitted);
      ;; passing nil for the set restores the retract behavior
      (let [db (d/db-with db [{:block/uuid block-uuid
                               :logseq.property.view/feature-type :aaa}])
            op-value {}
            ent (d/entity db [:block/uuid block-uuid])
            ignore-attr-set #{:logseq.property.view/feature-type}]
        (is (empty? (#'subject/remote-op-value->tx-data db ent op-value ignore-attr-set)))
        (is (= [[:db/retract (:db/id ent) :logseq.property.view/feature-type]]
               (#'subject/remote-op-value->tx-data db ent op-value nil)))))))

View File

@@ -1,456 +0,0 @@
(ns frontend.worker.rtc.rtc-fns-test
(:require [clojure.test :as t :refer [deftest is testing use-fixtures]]
[datascript.core :as d]
[frontend.db.conn :as conn]
[frontend.state :as state]
[frontend.test.helper :as test-helper]
[frontend.worker.fixtures :as worker-fixtures]
[frontend.worker.rtc.remote-update :as r.remote]
[logseq-schema.rtc-api-schema :as rtc-api-schema]
[logseq.db :as ldb]
[logseq.outliner.core :as outliner-core]
[logseq.outliner.transaction :as outliner-tx]))
;; Each test gets a fresh test db (created and destroyed around the test).
;; NOTE(review): the fixture key suggests worker db changes are mirrored to
;; the main thread — confirm against worker-fixtures/listen-test-db-fixture.
(use-fixtures :each
  test-helper/start-and-destroy-db
  (worker-fixtures/listen-test-db-fixture [:sync-db-to-main-thread]))
;; update-remote-data-by-local-unpushed-ops reconciles remote affected-blocks
;; against local ops that haven't been pushed yet, so un-synced local edits
;; aren't clobbered when remote data is applied.
(deftest ^:large-vars/cleanup-todo update-remote-data-by-local-unpushed-ops-test
  (testing "case1"
    (let [[uuid1 uuid2] (repeatedly (comp str random-uuid))
          affected-blocks-map
          {uuid1
           {:op :move
            :self uuid1
            :parents [uuid2]
            :block/order "a0"
            :block/title "content-str"}}
          unpushed-ops
          [[:update 1 {:block-uuid uuid1
                       :av-coll [[:block/title "new-content-str" 1 true]]}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; local unpushed :update overrides the remote :block/title
      (is (= {uuid1
              {:op :move
               :self uuid1
               :parents [uuid2]
               :block/order "a0"
               :block/title "new-content-str"}}
             r))))
  (testing "case2"
    (let [[uuid1] (repeatedly (comp str random-uuid))
          affected-blocks-map
          {uuid1
           {:op :remove
            :block-uuid uuid1}}
          unpushed-ops
          [[:move 1 {:block-uuid uuid1}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; a local unpushed :move cancels the remote :remove for the same block
      (is (empty? r))))
  (testing "case3"
    (let [[uuid1 uuid2] (repeatedly (comp str random-uuid))
          affected-blocks-map
          {uuid1
           {:op :move
            :self uuid1
            :parents [uuid2]
            :block/order "a0"}}
          unpushed-ops
          [[:move 1 {:block-uuid uuid1}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; a local unpushed :move cancels the remote :move for the same block
      (is (empty? r))))
  (testing "case4: update remote :update-attrs op"
    (let [[uuid1 uuid2] (repeatedly random-uuid)
          affected-blocks-map
          {uuid1
           {:op :update-attrs
            :self uuid1
            :parents [uuid2]
            :block/order "a0"
            :block/title "update content"}}
          unpushed-ops
          [[:move 1 {:block-uuid uuid1}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; the local :move strips remote position info (:parents, :block/order),
      ;; keeping the remaining attr updates
      (is (= {uuid1
              {:op :update-attrs
               :self uuid1
               :block/title "update content"}}
             r))))
  (testing "case5: card-many+ref attr"
    (let [[uuid1 uuid2] (repeatedly random-uuid)
          affected-blocks-map
          {uuid1
           {:op :update-attrs
            :self uuid1
            :parents [uuid2]
            :user.property/ppp [#uuid "6752bdee-7963-4a6a-84a4-86cd456b470c"
                                #uuid "6752bdf0-ee32-40af-8abb-3f8d179ba367"]}}
          unpushed-ops
          [[:update 536871132
            {:block-uuid uuid1
             :av-coll
             [[:user.property/ppp
               #uuid "6752bdee-7963-4a6a-84a4-86cd456b470c"
               536871128
               true]
              [:user.property/ppp
               #uuid "6752bdf0-ee32-40af-8abb-3f8d179ba367"
               536871132
               false]
              [:user.property/ppp
               #uuid "6752bdf0-ee32-40af-8abb-3f8d179ba888"
               536871132
               true]]}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; the local retract of ...ba367 and add of ...ba888 are applied on top
      ;; of the remote cardinality-many ref value
      (is (= {uuid1
              {:op :update-attrs
               :self uuid1
               :parents [uuid2]
               :user.property/ppp
               [#uuid "6752bdee-7963-4a6a-84a4-86cd456b470c"
                #uuid "6752bdf0-ee32-40af-8abb-3f8d179ba888"]}}
             r))))
  (testing "case6: toggle status"
    (let [[uuid1 uuid2 status-value-uuid] (repeatedly random-uuid)
          affected-blocks-map
          {uuid1
           {:op :update-attrs
            :self uuid1
            :parents [uuid2]}}
          unpushed-ops
          [[:update
            536872312
            {:block-uuid uuid1
             :av-coll
             [[:logseq.property/status status-value-uuid 536872312 true]]}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; the locally-added status value is merged into the remote op
      (is (= {uuid1
              {:op :update-attrs
               :self uuid1
               :parents [uuid2]
               :logseq.property/status [status-value-uuid]}}
             r))))
  (testing "case7: toggle status(2)"
    (let [[uuid1 uuid2 status-value-uuid1 status-value-uuid2] (repeatedly random-uuid)
          affected-blocks-map
          {uuid1
           {:op :update-attrs
            :self uuid1
            :parents [uuid2]}}
          unpushed-ops
          [[:update
            536872314
            {:block-uuid uuid1
             :av-coll
             [[:logseq.property/status status-value-uuid1 536872312 true]
              [:logseq.property/status status-value-uuid1 536872312 false]
              [:logseq.property/status status-value-uuid2 536872314 true]]}]]
          r (#'r.remote/update-remote-data-by-local-unpushed-ops affected-blocks-map unpushed-ops)]
      ;; add+retract of value1 cancels out; only value2 survives
      (is (= {uuid1
              {:op :update-attrs
               :self uuid1
               :parents [uuid2]
               :logseq.property/status [status-value-uuid2]}}
             r)))))
(defn- apply-move-ops!
  "Apply remote `move-ops` against `conn` inside a temporary-conn transaction."
  [conn move-ops]
  (letfn [(apply-on-temp-conn [temp-conn _batch-tx-data]
            (#'r.remote/apply-remote-move-ops temp-conn move-ops))]
    (ldb/transact-with-temp-conn! conn {} apply-on-temp-conn)))
;; Applying remote :move ops inserts/moves the remote blocks into the local
;; tree with the remote parent and :block/order.
(deftest apply-remote-move-ops-test
  (let [repo (state/get-current-repo)
        conn (conn/get-db repo false)
        opts {:persist-op? false
              :transact-opts {:repo repo
                              :conn conn}}
        page-name "apply-remote-move-ops-test"
        [page-uuid
         uuid1-client uuid2-client
         uuid1-remote uuid2-remote] (repeatedly random-uuid)]
    ;; page created with a known uuid so lookup-refs below resolve
    (test-helper/create-page! page-name {:redirect? false :uuid page-uuid})
    (outliner-tx/transact!
     opts
     (outliner-core/insert-blocks!
      conn
      [{:block/uuid uuid1-client
        :block/title "uuid1-client"
        :block/order "a1"
        :block/parent [:block/uuid page-uuid]}
       {:block/uuid uuid2-client
        :block/title "uuid2-client"
        :block/order "a2"
        :block/parent [:block/uuid page-uuid]}]
      (ldb/get-page @conn page-name)
      {:sibling? false :keep-uuid? true}))
    (testing "apply-remote-move-ops-test1"
      (let [data-from-ws {:req-id "req-id"
                          :t 1 ;; not used
                          :t-before 0 ;; not used
                          :affected-blocks
                          {uuid1-remote {:op :move
                                         :self uuid1-remote
                                         :parents [page-uuid]
                                         :block/order "a0"
                                         :block/title (ldb/write-transit-str "")
                                         :block/created-at (js/Date.now)
                                         :block/updated-at (js/Date.now)}}}
            move-ops (#'r.remote/move-ops-map->sorted-move-ops
                      (:move-ops-map
                       (#'r.remote/affected-blocks->diff-type-ops
                        repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws) data-from-ws)
        (apply-move-ops! conn move-ops)
        ;; remote block now exists on the page, parented to the page itself
        (let [page-blocks (ldb/get-page-blocks @conn (:db/id (ldb/get-page @conn page-name)) {})]
          (is (= #{uuid1-remote uuid1-client uuid2-client} (set (map :block/uuid page-blocks)))
              [uuid1-remote uuid1-client uuid2-client])
          (is (= page-uuid (:block/uuid (:block/parent (d/entity @conn [:block/uuid uuid1-remote]))))))))
    (testing "apply-remote-move-ops-test2"
      ;; uuid1-remote's parent (uuid2-remote) arrives in the same batch, so
      ;; move-ops must be sorted into dependency order (parents first)
      (let [data-from-ws {:req-id "req-id"
                          :t 1 ;; not used
                          :t-before 0
                          :affected-blocks
                          {uuid2-remote {:op :move
                                         :self uuid2-remote
                                         :parents [uuid1-client]
                                         :block/order "a0"
                                         :block/title (ldb/write-transit-str "")
                                         :block/created-at (js/Date.now)
                                         :block/updated-at (js/Date.now)}
                           uuid1-remote {:op :move
                                         :self uuid1-remote
                                         :parents [uuid2-remote]
                                         :block/order "a1"
                                         :block/title (ldb/write-transit-str "")
                                         :block/created-at (js/Date.now)
                                         :block/updated-at (js/Date.now)}}}
            move-ops (#'r.remote/move-ops-map->sorted-move-ops
                      (:move-ops-map
                       (#'r.remote/affected-blocks->diff-type-ops
                        repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (apply-move-ops! conn move-ops)
        (let [page-blocks (ldb/get-page-blocks @conn (:db/id (ldb/get-page @conn page-name)) {})]
          (is (= #{uuid1-remote uuid2-remote uuid1-client uuid2-client} (set (map :block/uuid page-blocks))))
          (is (= ["a0" "a1"]
                 (mapv (fn [uuid*] (:block/order (d/entity @conn [:block/uuid uuid*])))
                       [uuid2-remote uuid1-remote]))))))))
;; Applying remote :remove ops: removing an unknown uuid is a no-op; removing
;; an existing block deletes it from the page.
(deftest apply-remote-remove-ops-test
  (let [repo (state/get-current-repo)
        conn (conn/get-db repo false)
        opts {:persist-op? false
              :transact-opts {:repo repo
                              :conn conn}}
        page-name "apply-remote-remove-ops-test"
        [page-uuid
         uuid1-client uuid2-client
         uuid1-not-exist] (repeatedly random-uuid)]
    ;; Create the page WITH `page-uuid` so the [:block/uuid page-uuid]
    ;; lookup-refs below resolve; previously the uuid was generated but never
    ;; assigned to the page (cf. apply-remote-move-ops-test which passes it).
    (test-helper/create-page! page-name {:redirect? false :uuid page-uuid})
    (outliner-tx/transact!
     opts
     (outliner-core/insert-blocks!
      conn
      [{:block/uuid uuid1-client :block/title "uuid1-client"
        :block/left [:block/uuid page-uuid]
        :block/parent [:block/uuid page-uuid]}
       {:block/uuid uuid2-client :block/title "uuid2-client"
        :block/left [:block/uuid uuid1-client]
        :block/parent [:block/uuid page-uuid]}]
      (ldb/get-page @conn page-name)
      {:sibling? false :keep-uuid? true}))
    (testing "apply-remote-remove-ops-test1"
      ;; the :remove references a uuid that doesn't exist locally
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {uuid1-client {:op :remove
                                         :block-uuid uuid1-not-exist}}}
            remove-ops
            (vals
             (:remove-ops-map
              (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-remove-ops conn remove-ops)
        ;; removing an unknown uuid must leave both client blocks intact
        (let [page-blocks (ldb/get-page-blocks @conn (:db/id (ldb/get-page @conn page-name)) {})]
          (is (= #{uuid1-client uuid2-client} (set (map :block/uuid page-blocks)))))))
    (testing "apply-remote-remove-ops-test2"
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {uuid1-client {:op :remove
                                         :block-uuid uuid1-client}}}
            remove-ops (vals
                        (:remove-ops-map
                         (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-remove-ops conn remove-ops)
        ;; uuid1-client removed; uuid2-client remains
        (let [page-blocks (ldb/get-page-blocks @conn (:db/id (ldb/get-page @conn page-name)) {})]
          (is (= #{uuid2-client} (set (map :block/uuid page-blocks)))))))))
;; When the server removes a block (2) whose local child (3) still exists,
;; applying the remote :remove must keep the child, re-homed on the page.
(deftest apply-remote-remove-ops-test2
  (testing "
origin:
- 1
- 2
- 3
client: ;; move 3 as child of 2
- 1
- 2
- 3
server: ;; remove 2
- 1
- 3
result:
- 3
- 1"
    (let [repo (state/get-current-repo)
          conn (conn/get-db repo false)
          opts {:persist-op? false
                :transact-opts {:repo repo
                                :conn conn}}
          page-name "apply-remote-remove-ops-test2"
          [page-uuid
           uuid1 uuid2 uuid3] (repeatedly random-uuid)]
      ;; Create the page WITH `page-uuid` so the [:block/uuid page-uuid]
      ;; lookup-refs below resolve; previously the uuid was generated but
      ;; never assigned to the page (cf. apply-remote-move-ops-test).
      (test-helper/create-page! page-name {:redirect? false :uuid page-uuid})
      (outliner-tx/transact!
       opts
       (outliner-core/insert-blocks!
        ;; - 1
        ;; - 2
        ;;   - 3 (child of 2)
        conn
        [{:block/uuid uuid1 :block/title "1"
          :block/order "a0"
          :block/parent [:block/uuid page-uuid]}
         {:block/uuid uuid2 :block/title "2"
          :block/order "a1"
          :block/parent [:block/uuid page-uuid]}
         {:block/uuid uuid3 :block/title "3"
          :block/order "a0"
          :block/parent [:block/uuid uuid2]}]
        (ldb/get-page @conn page-name)
        {:sibling? false :keep-uuid? true}))
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {uuid2 {:op :remove
                                  :block-uuid uuid2}}}
            remove-ops
            (vals
             (:remove-ops-map
              (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-remove-ops conn remove-ops)
        ;; pass {} opts for consistency with every other get-page-blocks call
        (let [page-blocks (ldb/get-page-blocks @conn (:db/id (ldb/get-page @conn page-name)) {})]
          (is (= [uuid3 uuid1] (map :block/uuid (sort-by :block/order page-blocks)))))))))
;; :update-page ops create/rename pages; :remove-page ops delete them.
(deftest apply-remote-update&remove-page-ops-test
  (let [repo (state/get-current-repo)
        conn (conn/get-db repo false)
        [page1-uuid ;; page2-uuid page3-uuid page4-uuid
         ] (repeatedly random-uuid)
        page-tags [(:block/uuid (d/entity @conn :logseq.class/Page))]]
    (testing "apply-remote-update-page-ops-test1"
      ;; page doesn't exist locally yet => op creates it with the remote uuid
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {page1-uuid {:op :update-page
                                       :self page1-uuid
                                       :page-name (ldb/write-transit-str (str "X" page1-uuid))
                                       :block/title (ldb/write-transit-str (str "X" page1-uuid))
                                       :block/tags page-tags
                                       :block/created-at (js/Date.now)
                                       :block/updated-at (js/Date.now)}}}
            update-page-ops (vals
                             (:update-page-ops-map
                              (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-update-page-ops repo conn update-page-ops)
        (is (= page1-uuid (:block/uuid (d/entity @conn [:block/uuid page1-uuid]))))))
    (testing "apply-remote-update-page-ops-test2"
      ;; same uuid, new name => op renames the existing page
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {page1-uuid {:op :update-page
                                       :self page1-uuid
                                       :page-name (ldb/write-transit-str (str page1-uuid "-rename"))
                                       :block/title (ldb/write-transit-str (str page1-uuid "-rename"))
                                       :block/tags page-tags
                                       :block/created-at (js/Date.now)
                                       :block/updated-at (js/Date.now)}}}
            update-page-ops (vals
                             (:update-page-ops-map
                              (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-update-page-ops repo conn update-page-ops)
        (is (= (str page1-uuid "-rename") (:block/name (d/entity @conn [:block/uuid page1-uuid]))))))
    (testing "apply-remote-remove-page-ops-test1"
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {page1-uuid {:op :remove-page
                                       :block-uuid page1-uuid}}}
            remove-page-ops (vals
                             (:remove-page-ops-map
                              (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-remove-page-ops conn remove-page-ops)
        ;; page gone after applying the remove-page op
        (is (nil? (d/entity @conn [:block/uuid page1-uuid])))))))
;; TODO: add back once page merge gets supported
;; NOTE(review): this disabled test still uses older call shapes
;; (insert-blocks! taking `repo` before `conn`; apply-remote-*-ops taking a
;; `date-formatter` arg) that differ from the live tests above — it will need
;; updating before being re-enabled.
(comment
  (deftest same-name-two-pages-merge-test
    (let [repo (state/get-current-repo)
          conn (conn/get-db repo false)
          date-formatter (worker-state/get-date-formatter)
          opts {:persist-op? false
                :transact-opts {:repo repo
                                :conn conn}}
          page-name "same-name-page-test"
          [page1-uuid page2-uuid
           uuid1-client uuid2-client
           uuid1-remote uuid2-remote] (repeatedly random-uuid)]
      (test-helper/create-page! page-name {:redirect? false})
      (outliner-tx/transact!
       opts
       (outliner-core/insert-blocks!
        repo
        conn
        [{:block/uuid uuid1-client
          :block/title "uuid1-client"
          :block/left [:block/uuid page1-uuid]
          :block/parent [:block/uuid page1-uuid]}
         {:block/uuid uuid2-client
          :block/title "uuid2-client"
          :block/left [:block/uuid uuid1-client]
          :block/parent [:block/uuid page1-uuid]}]
        (ldb/get-page @conn page-name)
        {:sibling? false :keep-uuid? true}))
      (let [data-from-ws {:req-id "req-id" :t 1 :t-before 0
                          :affected-blocks
                          {page2-uuid {:op :update-page
                                       :self page2-uuid
                                       :page-name page-name
                                       :block/title page-name}
                           uuid1-remote {:op :move
                                         :self uuid1-remote
                                         :parents [page2-uuid]
                                         :left page2-uuid
                                         :block/title "uuid1-remote"}
                           uuid2-remote {:op :move
                                         :self uuid2-remote
                                         :parents [page2-uuid]
                                         :left uuid1-remote
                                         :block/title "uuid2-remote"}}}
            all-ops (#'r.remote/affected-blocks->diff-type-ops repo (:affected-blocks data-from-ws))
            update-page-ops (vals (:update-page-ops-map all-ops))
            move-ops (#'r.remote/move-ops-map->sorted-move-ops (:move-ops-map all-ops))]
        (is (rtc-api-schema/data-from-ws-validator data-from-ws))
        (#'r.remote/apply-remote-update-page-ops repo conn date-formatter update-page-ops)
        (#'r.remote/apply-remote-move-ops repo conn date-formatter move-ops)
        (let [page (ldb/get-page @conn page-name)]
          (is (= #{uuid1-client uuid2-client uuid1-remote uuid2-remote}
                 (set (map :block/uuid (ldb/get-page-blocks @conn (:db/id page) {})))))
          ;; local same-named page should be merged into the remote page's uuid
          (is (= page2-uuid (:block/uuid page))))))))

View File

@@ -0,0 +1,30 @@
(ns frontend.worker.sync.crypt-test
(:require [cljs.test :refer [deftest is async]]
[frontend.common.crypt :as crypt]
[frontend.worker.sync.crypt :as sync-crypt]
[logseq.db :as ldb]
[promesa.core :as p]))
(defn- <encrypt-text-for-snapshot
  "Transit-encode `value`, encrypt it with `aes-key`, then transit-encode the
  encrypted package. Returns a promise of the resulting string."
  [aes-key value]
  (p/chain (crypt/<encrypt-text aes-key (ldb/write-transit-str value))
           ldb/write-transit-str))
;; bb dev:test -v frontend.worker.sync.crypt-test works, however bb dev:lint-and-test failed
;; Round-trip: encrypt two values, decrypt a snapshot row batch, and check
;; the decrypted :keys entries carry the original plaintext values.
(deftest ^:fix-me decrypt-snapshot-rows-test
  (async done
         (-> (p/let [aes-key (crypt/<generate-aes-key)
                     encrypted-title (<encrypt-text-for-snapshot aes-key "Title")
                     encrypted-name (<encrypt-text-for-snapshot aes-key "name")
                     raw-content (ldb/write-transit-str
                                  {:keys [[1 :block/title encrypted-title 1000]
                                          [1 :block/title encrypted-name 1000]]})
                     rows [["addr-1" raw-content nil]]
                     [[_ decrypted-content _]] (sync-crypt/<decrypt-snapshot-rows-batch aes-key rows)
                     ;; renamed from `keys` to avoid shadowing clojure.core/keys
                     ;; (likely the lint failure mentioned above)
                     decrypted-keys (:keys (ldb/read-transit-str decrypted-content))]
               (is (= "Title" (nth (first decrypted-keys) 2)))
               (is (= "name" (nth (second decrypted-keys) 2)))
               (done))
             (p/catch (fn [e]
                        (is false (str e))
                        (done))))))