diff --git a/deps/common/.clj-kondo/config.edn b/deps/common/.clj-kondo/config.edn index b609a047b8..1e5862e33a 100644 --- a/deps/common/.clj-kondo/config.edn +++ b/deps/common/.clj-kondo/config.edn @@ -6,5 +6,6 @@ :consistent-alias {:aliases {clojure.string string}}} + :lint-as {promesa.core/let clojure.core/let} :skip-comments true :output {:progress true}} diff --git a/deps/common/src/logseq/common/authorization.cljs b/deps/common/src/logseq/common/authorization.cljs index 109010b270..71bba7b609 100644 --- a/deps/common/src/logseq/common/authorization.cljs +++ b/deps/common/src/logseq/common/authorization.cljs @@ -41,22 +41,22 @@ (defn- get-jwks-keys [url & {:keys [force?]}] (let [now (get-now-ms) - {:keys [url cached-url keys fetched-at]} {:cached-url (:url @*jwks-cache) - :url url - :keys (:keys @*jwks-cache) - :fetched-at (:fetched-at @*jwks-cache)} + {:keys [url cached-url jwks-keys fetched-at]} {:cached-url (:url @*jwks-cache) + :url url + :jwks-keys (:keys @*jwks-cache) + :fetched-at (:fetched-at @*jwks-cache)} fresh? (and (not force?) (= cached-url url) - keys + jwks-keys (< (- now fetched-at) jwks-ttl-ms))] (if fresh? - (p/resolved keys) + (p/resolved jwks-keys) (p/let [jwks-resp (js/fetch url) _ (when-not (.-ok jwks-resp) (throw (ex-info "jwks" {}))) jwks (.json jwks-resp) - keys (or (aget jwks "keys") #js [])] - (reset! *jwks-cache {:url url :keys keys :fetched-at now}) - keys)))) + jwks-keys (or (aget jwks "keys") #js [])] + (reset! *jwks-cache {:url url :keys jwks-keys :fetched-at now}) + jwks-keys)))) (defn- base64url->uint8array [input] (let [pad (if (pos? 
(mod (count input) 4)) @@ -84,40 +84,40 @@ #js ["verify"])) (defn verify-jwt [token env] - (let [parts (string/split token #"\.") - _ (when (not= 3 (count parts)) (throw (ex-info "invalid" {}))) - header-part (nth parts 0) - payload-part (nth parts 1) - signature-part (nth parts 2) - now-ms (get-now-ms) - now-s (js/Math.floor (/ now-ms 1000))] - (if-let [cached (cached-token token now-s now-ms)] - (p/resolved cached) - (p/let [header (decode-jwt-part header-part) - payload (decode-jwt-part payload-part) - issuer (aget env "COGNITO_ISSUER") - client-id (aget env "COGNITO_CLIENT_ID") - _ (when (not= (aget payload "iss") issuer) (throw (ex-info "iss not found" {}))) - _ (when (not= (aget payload "aud") client-id) (throw (ex-info "aud not found" {}))) - _ (when (and (aget payload "exp") (< (aget payload "exp") now-s)) - (throw (ex-info "exp" {}))) - jwks-url (aget env "COGNITO_JWKS_URL") - keys (get-jwks-keys jwks-url) - key (.find keys (fn [k] (= (aget k "kid") (aget header "kid")))) - key (if key - key - (p/let [keys (get-jwks-keys jwks-url :force? true) - key (.find keys (fn [k] (= (aget k "kid") (aget header "kid"))))] - key)) - _ (when-not key (throw (ex-info "kid" {}))) - crypto-key (import-rsa-key key) - data (.encode text-encoder (str header-part "." payload-part)) - signature (base64url->uint8array signature-part) - ok (.verify js/crypto.subtle - "RSASSA-PKCS1-v1_5" - crypto-key - signature - data)] - (when ok - (cache-token! 
token payload now-ms) - payload))))) + (let [parts (string/split token #"\.")] + (when (not= 3 (count parts)) (throw (ex-info "invalid" {}))) + (let [header-part (nth parts 0) + payload-part (nth parts 1) + signature-part (nth parts 2) + now-ms (get-now-ms) + now-s (js/Math.floor (/ now-ms 1000))] + (if-let [cached (cached-token token now-s now-ms)] + (p/resolved cached) + (p/let [header (decode-jwt-part header-part) + payload (decode-jwt-part payload-part) + issuer (aget env "COGNITO_ISSUER") + client-id (aget env "COGNITO_CLIENT_ID") + _ (when (not= (aget payload "iss") issuer) (throw (ex-info "iss not found" {}))) + _ (when (not= (aget payload "aud") client-id) (throw (ex-info "aud not found" {}))) + _ (when (and (aget payload "exp") (< (aget payload "exp") now-s)) + (throw (ex-info "exp" {}))) + jwks-url (aget env "COGNITO_JWKS_URL") + jwks-keys (get-jwks-keys jwks-url) + matching-key (.find jwks-keys (fn [k] (= (aget k "kid") (aget header "kid")))) + matching-key (if matching-key + matching-key + (p/let [jwks-keys (get-jwks-keys jwks-url :force? true) + matching-key (.find jwks-keys (fn [k] (= (aget k "kid") (aget header "kid"))))] + matching-key)) + _ (when-not matching-key (throw (ex-info "kid" {}))) + crypto-key (import-rsa-key matching-key) + data (.encode text-encoder (str header-part "." payload-part)) + signature (base64url->uint8array signature-part) + ok (.verify js/crypto.subtle + "RSASSA-PKCS1-v1_5" + crypto-key + signature + data)] + (when ok + (cache-token! 
token payload now-ms) + payload)))))) diff --git a/deps/db-sync/.clj-kondo/config.edn b/deps/db-sync/.clj-kondo/config.edn new file mode 100644 index 0000000000..52ae3140d2 --- /dev/null +++ b/deps/db-sync/.clj-kondo/config.edn @@ -0,0 +1,18 @@ +{:linters + {:aliased-namespace-symbol {:level :warning} + :namespace-name-mismatch {:level :warning} + :used-underscored-binding {:level :warning} + :shadowed-var {:level :warning + :exclude [meta name key keys uuid type]} + + :consistent-alias + {:aliases {clojure.pprint pprint + clojure.string string + datascript.core d + datascript.transit dt + logseq.publish.common publish-common + logseq.publish.model publish-model}}} + :lint-as {promesa.core/let clojure.core/let + shadow.cljs.modern/defclass clj-kondo.lint-as/def-catch-all} + :skip-comments true + :output {:progress true}} diff --git a/deps/db-sync/src/logseq/db_sync/worker.cljs b/deps/db-sync/src/logseq/db_sync/worker.cljs index 22373eb59c..73cf05bb44 100644 --- a/deps/db-sync/src/logseq/db_sync/worker.cljs +++ b/deps/db-sync/src/logseq/db_sync/worker.cljs @@ -185,10 +185,6 @@ [^js self ^js ws] (swap! (presence* self) dissoc ws)) -(defn- fail-fast [tag data] - (log/error tag data) - (throw (ex-info (name tag) data))) - (defn- coerce-http-request [schema-key body] (if-let [coercer (get db-sync-schema/http-request-coercers schema-key)] (let [coerced (coerce coercer body {:schema schema-key :dir :request})] @@ -277,11 +273,6 @@ (let [url (js/URL. (.-url request))] (str (.-origin url) "/assets/" graph-id "/" snapshot-id ".snapshot"))) -(defn- maybe-compress-stream [stream] - (if (exists? js/CompressionStream) - (.pipeThrough stream (js/CompressionStream. "gzip")) - stream)) - (defn- maybe-decompress-stream [stream encoding] (if (and (= encoding snapshot-content-encoding) (exists? js/DecompressionStream)) (.pipeThrough stream (js/DecompressionStream. 
"gzip")) diff --git a/deps/publish/.clj-kondo/config.edn b/deps/publish/.clj-kondo/config.edn index 402b4e3e2e..52ae3140d2 100644 --- a/deps/publish/.clj-kondo/config.edn +++ b/deps/publish/.clj-kondo/config.edn @@ -12,7 +12,7 @@ datascript.transit dt logseq.publish.common publish-common logseq.publish.model publish-model}}} - :lint-as {logseq.publish.async/js-await clojure.core/let + :lint-as {promesa.core/let clojure.core/let shadow.cljs.modern/defclass clj-kondo.lint-as/def-catch-all} :skip-comments true :output {:progress true}} diff --git a/docs/dev-practices.md b/docs/dev-practices.md index f861a5c153..8f983070b4 100644 --- a/docs/dev-practices.md +++ b/docs/dev-practices.md @@ -110,18 +110,6 @@ $ typos -w To configure it e.g. for dealing with false positives, see `typos.toml`. -### Separate DB and File Graph Code - -There is a growing number of code and features that are only for file or DB graphs. Run this linter to -ensure that code you add or modify keeps with existing conventions: - -``` -$ bb lint:db-and-file-graphs-separate -✅ All checks passed! -``` - -The main convention is that file and db specific files go under directories named `file_based` and `db_based` respectively. To see the full list of file and db specific namespaces and files see the top of [the script](/scripts/src/logseq/tasks/dev/db_and_file_graphs.clj). - ### Separate Worker from Frontend The worker and frontend code share common code from deps/ and `frontend.common.*`. However, the worker should never depend on other frontend namespaces as it could pull in libraries like React which cause it to fail hard. Likewise the frontend should never depend on worker namespaces. Run this linter to ensure worker and frontend namespaces don't require each other: @@ -363,7 +351,7 @@ These tasks are specific to database graphs. 
For these tasks there is a one time $ bb dev:transact test-db '[:find ?b :where [?b :block/title "say wut"]]' '(fn [id] (vector :db/add id :block/title "say woot!"))' Updated 1 block(s) for graph test-db! ``` - + Run the dev command `Replace graph with its db.sqlite file` to use the updated graph in the desktop app. * `dev:create` - Create a DB graph given a `sqlite.build` EDN file diff --git a/scripts/src/logseq/tasks/dev/lint.clj b/scripts/src/logseq/tasks/dev/lint.clj index 377497c803..8dff77c741 100644 --- a/scripts/src/logseq/tasks/dev/lint.clj +++ b/scripts/src/logseq/tasks/dev/lint.clj @@ -15,7 +15,6 @@ "bb lint:carve" "bb lint:large-vars" "bb lint:worker-and-frontend-separate" - "bb lint:db-and-file-graphs-separate" "bb lang:validate-translations" "bb lint:ns-docstrings"]] (println cmd) diff --git a/src/main/frontend/worker/db_sync.cljs b/src/main/frontend/worker/db_sync.cljs index 973abbc9f3..32aaf4e528 100644 --- a/src/main/frontend/worker/db_sync.cljs +++ b/src/main/frontend/worker/db_sync.cljs @@ -1080,6 +1080,61 @@ (concat (map (fn [id] [:db/retractEntity id]) retract-block-ids)) keep-last-update))) +(defn- apply-remote-tx-with-local-changes! + [{:keys [conn local-txs reversed-tx-data safe-remote-tx-data remote-deleted-blocks + temp-tx-meta *remote-tx-report *reversed-tx-report *remote-deleted-ids *rebase-tx-data]}] + (let [batch-tx-meta {:rtc-tx? true + ;; Reverse/rebase batches can temporarily violate block schema. + :skip-validate-db? true}] + (ldb/transact-with-temp-conn! + conn + batch-tx-meta + (fn [temp-conn _*batch-tx-data] + (let [tx-meta temp-tx-meta + reversed-tx-report (ldb/transact! temp-conn reversed-tx-data (assoc tx-meta :op :reverse)) + _ (reset! *reversed-tx-report reversed-tx-report) + ;; 2. transact remote tx-data + remote-tx-report (let [tx-meta (assoc tx-meta :op :transact-remote-tx-data)] + (ldb/transact! temp-conn safe-remote-tx-data tx-meta)) + _ (reset! 
*remote-tx-report remote-tx-report) + local-deleted-blocks (get-local-deleted-blocks reversed-tx-report reversed-tx-data) + _ (when (seq remote-deleted-blocks) + (reset! *remote-deleted-ids (set (map :block/uuid remote-deleted-blocks)))) + deleted-nodes (concat local-deleted-blocks remote-deleted-blocks) + deleted-ids (set (keep :block/uuid deleted-nodes)) + ;; 3. rebase pending local txs + rebase-tx-report (when (seq local-txs) + (let [pending-tx-data (mapcat :tx local-txs) + rebased-tx-data (sanitize-tx-data + (or (:db-after remote-tx-report) + (:db-after reversed-tx-report)) + pending-tx-data + (set (map :block/uuid local-deleted-blocks)))] + (when (seq rebased-tx-data) + (ldb/transact! temp-conn rebased-tx-data (assoc tx-meta :op :rebase))))) + ;; 4. delete nodes and fix tx data + db @temp-conn + deleted-nodes (keep (fn [id] (d/entity db [:block/uuid id])) deleted-ids)] + (delete-nodes! temp-conn deleted-nodes (assoc tx-meta :op :delete-blocks)) + (fix-tx! temp-conn remote-tx-report rebase-tx-report (assoc tx-meta :op :fix)))) + {:listen-db (fn [{:keys [tx-meta tx-data]}] + (when-not (contains? #{:reverse :transact-remote-tx-data} (:op tx-meta)) + (swap! *rebase-tx-data into tx-data)))}))) + +(defn- apply-remote-tx-without-local-changes! + [{:keys [conn safe-remote-tx-data remote-deleted-block-ids temp-tx-meta]}] + (let [db @conn] + (ldb/transact-with-temp-conn! + conn + {:rtc-tx? true} + (fn [temp-conn] + (when (seq safe-remote-tx-data) + (d/transact! temp-conn safe-remote-tx-data {:rtc-tx? true})) + (when-let [deleted-nodes (keep (fn [id] (d/entity db [:block/uuid id])) + remote-deleted-block-ids)] + (delete-nodes! temp-conn deleted-nodes + (assoc temp-tx-meta :op :delete-blocks))))))) + (defn- apply-remote-tx! [repo client tx-data*] (if-let [conn (worker-state/get-datascript-conn repo)] @@ -1108,57 +1163,20 @@ :temp-conn? true :gen-undo-ops? false :persist-op? false} - tx-report - (if has-local-changes? - (let [batch-tx-meta {:rtc-tx? 
true}] - (ldb/transact-with-temp-conn! - conn - batch-tx-meta - (fn [temp-conn _*batch-tx-data] - (let [tx-meta temp-tx-meta - reversed-tx-report (ldb/transact! temp-conn reversed-tx-data (assoc tx-meta :op :reverse)) - _ (reset! *reversed-tx-report reversed-tx-report) - ;; 2. transact remote tx-data - remote-tx-report (let [tx-meta (assoc tx-meta :op :transact-remote-tx-data)] - (ldb/transact! temp-conn safe-remote-tx-data tx-meta)) - _ (reset! *remote-tx-report remote-tx-report) - local-deleted-blocks (get-local-deleted-blocks reversed-tx-report reversed-tx-data) - _ (when (seq remote-deleted-blocks) - (reset! *remote-deleted-ids (set (map :block/uuid remote-deleted-blocks)))) - ;; _ (prn :debug - ;; :local-deleted-blocks (map (fn [b] (select-keys b [:db/id :block/title])) local-deleted-blocks) - ;; :remote-deleted-blocks remote-deleted-blocks) - deleted-nodes (concat local-deleted-blocks remote-deleted-blocks) - deleted-ids (set (keep :block/uuid deleted-nodes)) - ;; 3. rebase pending local txs - rebase-tx-report (when (seq local-txs) - (let [pending-tx-data (mapcat :tx local-txs) - rebased-tx-data (sanitize-tx-data - (or (:db-after remote-tx-report) - (:db-after reversed-tx-report)) - pending-tx-data - (set (map :block/uuid local-deleted-blocks)))] - ;; (prn :debug :pending-tx-data pending-tx-data) - ;; (prn :debug :rebased-tx-data rebased-tx-data) - (when (seq rebased-tx-data) - (ldb/transact! temp-conn rebased-tx-data (assoc tx-meta :op :rebase))))) - ;; 4. delete nodes and fix tx data - db @temp-conn - deleted-nodes (keep (fn [id] (d/entity db [:block/uuid id])) deleted-ids)] - (delete-nodes! temp-conn deleted-nodes (assoc tx-meta :op :delete-blocks)) - (fix-tx! temp-conn remote-tx-report rebase-tx-report (assoc tx-meta :op :fix)))) - {:listen-db (fn [{:keys [tx-meta tx-data]}] - (when-not (contains? #{:reverse :transact-remote-tx-data} (:op tx-meta)) - (swap! *rebase-tx-data into tx-data)))})) - (ldb/transact-with-temp-conn! - conn - {:rtc-tx? 
true} - (fn [temp-conn] - (when (seq safe-remote-tx-data) - (d/transact! temp-conn safe-remote-tx-data {:rtc-tx? true})) - (when-let [deleted-nodes (keep (fn [id] (d/entity db [:block/uuid id])) remote-deleted-block-ids)] - (delete-nodes! temp-conn deleted-nodes - (assoc temp-tx-meta :op :delete-blocks)))))) + apply-context {:conn conn + :local-txs local-txs + :reversed-tx-data reversed-tx-data + :safe-remote-tx-data safe-remote-tx-data + :remote-deleted-blocks remote-deleted-blocks + :remote-deleted-block-ids remote-deleted-block-ids + :temp-tx-meta temp-tx-meta + :*remote-tx-report *remote-tx-report + :*reversed-tx-report *reversed-tx-report + :*remote-deleted-ids *remote-deleted-ids + :*rebase-tx-data *rebase-tx-data} + tx-report (if has-local-changes? + (apply-remote-tx-with-local-changes! apply-context) + (apply-remote-tx-without-local-changes! apply-context)) remote-tx-report @*remote-tx-report] ;; persist rebase tx to client ops (when has-local-changes? diff --git a/src/test/frontend/worker/db_sync_sim_test.cljs b/src/test/frontend/worker/db_sync_sim_test.cljs index 04331cab3a..11316e5db6 100644 --- a/src/test/frontend/worker/db_sync_sim_test.cljs +++ b/src/test/frontend/worker/db_sync_sim_test.cljs @@ -423,6 +423,38 @@ (str "db empty seed=" seed " history=" (count @history)))))))))) (defonce op-runs 500) + +(defn- run-random-ops! + [rng server clients repo->state base-uuid history run-ops-opts steps] + (dotimes [_ steps] + (let [client (rand-nth! rng clients) + state (get repo->state (:repo client))] + (run-ops! rng (assoc client :base-uuid base-uuid :state state) 1 history run-ops-opts) + (sync-loop! server clients)))) + +(defn- run-local-ops! + [rng conn base-uuid state history run-ops-opts steps] + (dotimes [_ steps] + (run-ops! rng {:conn conn :base-uuid base-uuid :state state} 1 history run-ops-opts))) + +(defn- assert-synced-attrs! 
+ [seed history attrs-a attrs-b attrs-c] + (when-not (= attrs-a attrs-b) + (let [[a b] (take 2 (data/diff attrs-a attrs-b))] + (prn :debug :diff :attrs-a a :attrs-b b))) + (when-not (= attrs-a attrs-c) + (let [[a c] (take 2 (data/diff attrs-a attrs-c))] + (prn :debug :diff :attrs-a a :attrs-c c))) + (is (= attrs-a attrs-b) + (str "db mismatch A/B seed=" seed + " a=" (count attrs-a) + " b=" (count attrs-b) + " history=" (count @history))) + (is (= attrs-a attrs-c) + (str "db mismatch A/C seed=" seed + " a=" (count attrs-a) + " c=" (count attrs-c) + " history=" (count @history)))) (deftest three-clients-single-repo-sim-test (prn :debug "run three-clients-single-repo-sim-test") (testing "db-sync convergence with three clients sharing one repo" @@ -451,12 +483,10 @@ repo-c {:conn conn-c :ops-conn ops-c}} (fn [] (reset! db-sync/*repo->latest-remote-tx {}) - (ensure-base-page! conn-a base-uuid) - (ensure-base-page! conn-b base-uuid) - (ensure-base-page! conn-c base-uuid) - (client-op/update-local-tx repo-a 0) - (client-op/update-local-tx repo-b 0) - (client-op/update-local-tx repo-c 0) + (doseq [conn [conn-a conn-b conn-c]] + (ensure-base-page! conn base-uuid)) + (doseq [repo [repo-a repo-b repo-c]] + (client-op/update-local-tx repo 0)) (let [clients [{:repo repo-a :conn conn-a :client client-a :online? true} {:repo repo-b :conn conn-b :client client-b :online? true} {:repo repo-c :conn conn-c :client client-c :online? true}] @@ -464,24 +494,16 @@ run-ops-opts {}] (prn :debug :phase-a) ;; Phase A: all online - (dotimes [_ op-runs] - (let [client (rand-nth! rng clients) - state (get repo->state (:repo client))] - (run-ops! rng (assoc client :base-uuid base-uuid :state state) 1 history run-ops-opts) - (sync-loop! server clients))) + (run-random-ops! rng server clients repo->state base-uuid history run-ops-opts op-runs) ;; Phase B: C offline, A/B online (prn :debug :phase-b-c-offline) (let [clients-phase-b [{:repo repo-a :conn conn-a :client client-a :online? 
true} {:repo repo-b :conn conn-b :client client-b :online? true} {:repo repo-c :conn conn-c :client client-c :online? false}]] - (dotimes [_ op-runs] - (let [client (rand-nth! rng (subvec (vec clients-phase-b) 0 2)) - state (get repo->state (:repo client))] - (run-ops! rng (assoc client :base-uuid base-uuid :state state) 1 history run-ops-opts) - (sync-loop! server clients-phase-b))) - (dotimes [_ op-runs] - (run-ops! rng {:client client-c :conn conn-c :base-uuid base-uuid :state state-c} 1 history run-ops-opts))) + (run-random-ops! rng server (subvec (vec clients-phase-b) 0 2) repo->state + base-uuid history run-ops-opts op-runs) + (run-local-ops! rng conn-c base-uuid state-c history run-ops-opts op-runs)) ;; Phase C: reconnect C (prn :debug :phase-c-reconnect) @@ -492,13 +514,9 @@ (let [clients-phase-d [{:repo repo-a :conn conn-a :client client-a :online? false} {:repo repo-b :conn conn-b :client client-b :online? true} {:repo repo-c :conn conn-c :client client-c :online? true}]] - (dotimes [_ op-runs] - (let [client (rand-nth! rng (subvec (vec clients-phase-d) 1 3)) - state (get repo->state (:repo client))] - (run-ops! rng (assoc client :base-uuid base-uuid :state state) 1 history run-ops-opts) - (sync-loop! server clients-phase-d))) - (dotimes [_ op-runs] - (run-ops! rng {:conn conn-a :base-uuid base-uuid :state state-a} 1 history run-ops-opts))) + (run-random-ops! rng server (subvec (vec clients-phase-d) 1 3) repo->state + base-uuid history run-ops-opts op-runs) + (run-local-ops! 
rng conn-a base-uuid state-a history run-ops-opts op-runs)) ;; Final sync (prn :debug :final-sync) @@ -514,21 +532,4 @@ (let [attrs-a (block-attr-map @conn-a) attrs-b (block-attr-map @conn-b) attrs-c (block-attr-map @conn-c)] - (when-not (= attrs-a attrs-b) - (let [[a b] (take 2 (data/diff attrs-a attrs-b))] - (prn :debug :diff :attrs-a a - :attrs-b b))) - (when-not (= attrs-a attrs-c) - (let [[a c] (take 2 (data/diff attrs-a attrs-c))] - (prn :debug :diff :attrs-a a - :attrs-c c))) - (is (= attrs-a attrs-b) - (str "db mismatch A/B seed=" seed - " a=" (count attrs-a) - " b=" (count attrs-b) - " history=" (count @history))) - (is (= attrs-a attrs-c) - (str "db mismatch A/C seed=" seed - " a=" (count attrs-a) - " c=" (count attrs-c) - " history=" (count @history)))))))))) + (assert-synced-attrs! seed history attrs-a attrs-b attrs-c))))))))