fix: handle desktop sqlite binary export

Fixes logseq/db-test#873
This commit is contained in:
Tienson Qin
2026-05-15 16:10:48 +08:00
parent ee9a667392
commit 18c69fe1a8
11 changed files with 450 additions and 42 deletions

View File

@@ -216,7 +216,7 @@
:accelerator false}])
[{:label "Always on Top"
:type "checkbox"
:click (fn [menuItem browserWindow]
:click (fn [^js menuItem ^js browserWindow]
;; switch alwaysOnTop state
(.setAlwaysOnTop browserWindow (.-checked menuItem)))}])})
;; Windows has no about role

View File

@@ -157,9 +157,37 @@
(-> (if (string? file) file (.arrayBuffer file))
(p/then db-asset/<get-file-array-buffer-checksum)))
;; Look up `field-name` on a payload that may be a Clojure map (string or
;; keyword keys) or a plain JS object.  Fallback order is the contract:
;; string key first, then keyword key, then raw JS property access.
;; Returns nil when the field is absent everywhere.
(defn- field-value
  [payload field-name]
  (or (get payload field-name)
      (get payload (keyword field-name))
      (when (object? payload)
        (aget payload field-name))))
(defn- indexed-object->array
  "Collect the values of a JS object's numeric-string properties
  (\"0\", \"1\", ...) into a JS array ordered by index.
  Returns nil when the object has no such properties."
  [payload]
  (let [index-keys (->> (js->clj (js/Object.keys payload))
                        (filter (fn [k] (re-matches #"\d+" k)))
                        (sort-by (fn [k] (js/parseInt k 10))))]
    (when-not (empty? index-keys)
      (clj->js (mapv (fn [k] (aget payload k)) index-keys)))))
(defn- indexed-map->array
  "Collect the values of a map's numeric keys (string or keyword, e.g.
  \"0\" \"1\" ...) into a JS array ordered by index.
  Returns nil when the map has no numeric keys."
  [payload]
  ;; `index-keys` avoids shadowing clojure.core/keys, which the original
  ;; bound locally; the init expression still uses the core fn.
  (let [index-keys (->> (keys payload)
                        (filter (fn [k] (re-matches #"\d+" (str k))))
                        (sort-by (fn [k] (js/parseInt (str k) 10))))]
    (when-not (empty? index-keys)
      (clj->js (mapv (fn [k] (get payload k)) index-keys)))))
(defn ->uint8
[payload]
(cond
(and (exists? js/Blob)
(instance? js/Blob payload))
payload
(instance? js/Uint8Array payload)
payload
@@ -176,10 +204,32 @@
(sequential? payload)
(js/Uint8Array. (clj->js payload))
(and (= "Buffer" (field-value payload "type"))
(some? (field-value payload "data")))
(->uint8 (field-value payload "data"))
(map? payload)
(if-let [data (indexed-map->array payload)]
(js/Uint8Array. data)
(throw (ex-info "unsupported binary payload"
{:payload-type (str (type payload))
:keys (mapv str (keys payload))})))
(and (object? payload)
(= "Buffer" (aget payload "type"))
(array? (aget payload "data")))
(js/Uint8Array. (aget payload "data"))
(number? (aget payload "length")))
(js/Uint8Array. payload)
(object? payload)
(if-let [data (indexed-object->array payload)]
(js/Uint8Array. data)
(throw (ex-info "unsupported binary payload"
{:payload-type (str (type payload))
:object-tag (try
(.call (.-toString (.-prototype js/Object)) payload)
(catch :default _ nil))
:keys (try
(js->clj (js/Object.keys payload))
(catch :default _ nil))})))
:else
(throw (ex-info "unsupported binary payload"
@@ -187,13 +237,17 @@
(defn <get-all-assets
[]
(when-let [path (config/get-current-repo-assets-root)]
(p/let [result (p/catch (fs/readdir path {:path-only? true})
(constantly nil))]
(p/all (map (fn [path]
(p/let [data (fs/read-file-raw path "" {})]
(let [path' (util/node-path.join "assets" (util/node-path.basename path))]
[path' data]))) result)))))
(if-let [path (config/get-current-repo-assets-root)]
(p/let [exists? (p/catch (fs/stat path)
(constantly nil))]
(if exists?
(p/let [result (fs/readdir path {:path-only? true})]
(p/all (map (fn [path]
(p/let [data (fs/read-file-raw path "" {})]
(let [path' (util/node-path.join "assets" (util/node-path.basename path))]
[path' data]))) result)))
(p/resolved [])))
(p/resolved [])))
(defn ensure-assets-dir!
[repo]

View File

@@ -6,6 +6,7 @@
[frontend.context.i18n :refer [t]]
[frontend.db :as db]
[frontend.extensions.zip :as zip]
[frontend.fs :as fs]
[frontend.handler.assets :as assets-handler]
[frontend.handler.export.common :as export-common-handler]
[frontend.handler.notification :as notification]
@@ -16,6 +17,7 @@
[goog.dom :as gdom]
[logseq.db :as ldb]
[logseq.db.common.sqlite :as common-sqlite]
[logseq.common.path :as path]
[logseq.publishing.html :as publish-html]
[promesa.core :as p]))
@@ -44,10 +46,42 @@
(.setAttribute anchor "download" "index.html")
(.click anchor))))))
(defn- file-name
  "Build an export file name `<repo>_<unix-seconds>.<ext>` from `repo`
  (leading slashes stripped) and `extension` (keyword or string,
  lower-cased)."
  [repo extension]
  (let [base (string/replace repo #"^/+" "")
        seconds (quot (util/time-ms) 1000)
        ext (string/lower-case (name extension))]
    (str base "_" seconds "." ext)))
;; Coerce one [filename data] zip entry so `data` becomes something the zip
;; writer accepts (via assets-handler/->uint8).  Coercion failures are
;; re-thrown with the offending :filename merged into the ex-data so export
;; errors point at the broken entry.
(defn- normalize-zip-entry
  [[filename data]]
  (try
    [filename (assets-handler/->uint8 data)]
    (catch :default e
      (throw (ex-info "unsupported zip entry payload"
                      (assoc (or (ex-data e) {})
                             :filename filename)
                      e)))))
;; Fetch the graph's SQLite DB as binary data for zipping.  On Electron ask
;; the db-worker directly via :thread-api/export-db-binary; otherwise fall
;; back to persist-db with :return-data? so the bytes are returned instead
;; of being handled as a browser download.
(defn- <export-db-binary-for-zip
  [repo]
  (if (util/electron?)
    (state/<invoke-db-worker :thread-api/export-db-binary repo)
    (persist-db/<export-db repo {:return-data? true})))
;; Write `zipfile` (a JS Blob/File) to <repo-dir>/export/<sanitized>_<ts>.zip
;; on the desktop, creating the export directory when missing.
;; Resolves to the absolute path that was written so callers can show it.
(defn- <export-zipfile-to-desktop!
  [repo ^js zipfile]
  (let [repo-name (common-sqlite/sanitize-db-name repo)
        export-dir (path/path-join (config/get-repo-dir repo) "export")
        export-path (path/path-join export-dir (file-name repo-name "zip"))]
    (p/let [content (.arrayBuffer zipfile)
            _ (fs/mkdir-if-not-exists export-dir)
            ;; writeFileBytes takes an ArrayBuffer, hence .arrayBuffer above.
            _ (js/window.apis.writeFileBytes export-path content)]
      export-path)))
(defn db-based-export-repo-as-zip!
[repo]
(state/pub-event! [:dialog/export-zip (t :export/preparing-zip)])
(-> (p/let [db-data (persist-db/<export-db repo {:return-data? true})
(-> (p/let [db-data (<export-db-binary-for-zip repo)
filename "db.sqlite"
repo-name (common-sqlite/sanitize-db-name repo)
_ (state/set-state! :graph/exporting-state {:total 100
@@ -55,7 +89,8 @@
:current-page (t :export/collecting-assets)
:label (t :export/exporting)})
assets (assets-handler/<get-all-assets)
files (cons [filename db-data] assets)
files (map normalize-zip-entry
(cons [filename db-data] assets))
_ (state/set-state! :graph/exporting-state {:total 100
:current-idx 40
:current-page (t :export/creating-zip)
@@ -73,10 +108,13 @@
:current-idx 100
:current-page (t :export/finalizing)
:label (t :export/exporting)})
(when-let [anchor (gdom/getElement "download-as-zip")]
(.setAttribute anchor "href" (js/window.URL.createObjectURL zipfile))
(.setAttribute anchor "download" (.-name zipfile))
(.click anchor)))
(if (util/electron?)
(p/let [export-path (<export-zipfile-to-desktop! repo zipfile)]
(notification/show! (t :export/zip-exported export-path) :success false))
(when-let [anchor (gdom/getElement "download-as-zip")]
(.setAttribute anchor "href" (js/window.URL.createObjectURL zipfile))
(.setAttribute anchor "download" (.-name zipfile))
(.click anchor))))
(p/catch (fn [error]
(js/console.error error)
(notification/show! (t :export/zip-error) :error)))
@@ -87,12 +125,6 @@
[repo]
(db-based-export-repo-as-zip! repo))
(defn- file-name [repo extension]
(-> repo
(string/replace #"^/+" "")
(str "_" (quot (util/time-ms) 1000))
(str "." (string/lower-case (name extension)))))
(defn export-repo-as-debug-transit!
[repo]
(p/let [result (export-common-handler/<get-debug-datoms repo)

View File

@@ -16,6 +16,12 @@
[base-url]
(str (normalize-base-url base-url) "/v1/invoke"))
(defn- import-db-binary-url
  "URL of the raw-binary SQLite import endpoint for `repo` on `base-url`."
  [base-url repo]
  (let [root (normalize-base-url base-url)
        encoded-repo (js/encodeURIComponent repo)]
    (str root "/v1/import-db-binary?repo=" encoded-repo)))
(defn- events-url
[base-url]
(str (normalize-base-url base-url) "/v1/events"))
@@ -27,6 +33,10 @@
(seq auth-token)
(assoc "Authorization" (str "Bearer " auth-token))))
(defn- binary-headers
  "HTTP headers for raw binary uploads: the usual base headers (incl. any
  bearer auth) plus an octet-stream content type."
  [auth-token]
  (-> (base-headers auth-token)
      (assoc "Content-Type" "application/octet-stream")))
(defn- parse-response-body
[body]
(cond
@@ -120,6 +130,32 @@
(on-invoke-failure method args error))
(throw error))))))
;; POST raw SQLite bytes for `repo` straight to the daemon's
;; /v1/import-db-binary endpoint, bypassing the transit-encoded /v1/invoke
;; channel so large binaries avoid transit serialization.  Mirrors invoke!'s
;; contract: fires on-invoke-success/on-invoke-failure and shapes HTTP
;; failures into ex-info carrying :status/:code/:error.
(defn- import-db-binary!
  [{:keys [base-url auth-token fetch-fn on-invoke-success on-invoke-failure]} repo data]
  (let [method "thread-api/import-db-binary"
        args [repo data]]
    (->
     (p/let [{:keys [status body]}
             (fetch-fn {:method "POST"
                        :url (import-db-binary-url base-url repo)
                        :headers (binary-headers auth-token)
                        ;; body is the raw binary payload, untouched.
                        :body data})
             parsed (parse-response-body body)]
       (if (<= 200 status 299)
         ;; Success: the daemon still returns the result transit-encoded.
         (let [result (ldb/read-transit-str (:resultTransit parsed))]
           (when on-invoke-success
             (on-invoke-success method args result))
           result)
         (let [error (:error parsed)]
           (throw (ex-info (or (:message error) "db-worker invoke failed")
                           (cond-> {:status status
                                    :code (normalize-code (:code error))}
                             error (assoc :error error)))))))
     ;; Report the failure to the observer hook, then re-throw for callers.
     (p/catch (fn [error]
                (when on-invoke-failure
                  (on-invoke-failure method args error))
                (throw error))))))
(defn connect-events!
[{:keys [base-url auth-token event-handler open-sse-fn schedule-fn reconnect-delay-ms on-event-error]} wrapped-worker]
(let [connected? (atom true)
@@ -191,7 +227,7 @@
(<import-db [_this repo data]
(->
(invoke! client "thread-api/import-db-binary" [repo data])
(import-db-binary! client repo data)
(p/catch (fn [error]
(log/error :import-db-error repo error "SQLiteDB import error")
(notification/show! (t :storage/sqlitedb-import-error error) :error) {})))))

View File

@@ -37,17 +37,22 @@
(.writeHead res status #js {"Content-Type" "text/plain"})
(.end res text))
(defn- <read-body
(defn- <read-body-buffer
[^js req]
(p/create
(fn [resolve reject]
(let [chunks (array)]
(.on req "data" (fn [chunk] (.push chunks chunk)))
(.on req "end" (fn []
(let [buf (js/Buffer.concat chunks)]
(resolve (.toString buf "utf8")))))
(resolve (js/Buffer.concat chunks))))
(.on req "error" reject)))))
(defn- <read-body
  "Read the full HTTP request body and resolve it as a UTF-8 string."
  [^js req]
  (p/let [buf (<read-body-buffer req)]
    (.toString buf "utf8")))
(defn- parse-args
[argv]
(loop [args (vec (drop 2 argv))
@@ -136,6 +141,19 @@
(p/finally (fn []
(js/clearTimeout timeout-id))))))
;; Invoke a binary-payload method on the worker proxy.  A 10s watchdog logs
;; a warning (with the elapsed time) if the call is still outstanding; the
;; timer is cleared when the promise settles, so the warning only fires for
;; genuinely slow invocations.
(defn- <invoke-binary!
  [^js proxy method-str method-kw repo payload]
  (let [started-at (js/Date.now)
        timeout-id (js/setTimeout
                    (fn []
                      (log/warn :db-worker-node-invoke-timeout
                                {:method (or method-kw method-str)
                                 :elapsed-ms (- (js/Date.now) started-at)}))
                    10000)]
    (-> (p/do! (.remoteInvokeBinary proxy method-str repo payload))
        ;; p/finally guarantees the watchdog is cancelled on success or error.
        (p/finally (fn []
                     (js/clearTimeout timeout-id))))))
(defn- <init-worker!
[proxy]
(let [method-kw :thread-api/init
@@ -286,9 +304,11 @@
(http/createServer
(fn [^js req ^js res]
(let [url (.-url req)
parsed-url (js/URL. url "http://127.0.0.1")
request-path (.-pathname parsed-url)
method (.-method req)]
(cond
(= url "/healthz")
(= request-path "/healthz")
(send-json! res (if @*ready? 200 503)
(health-payload {:bound-repo bound-repo
:host host
@@ -296,10 +316,40 @@
:owner-source owner-source
:root-dir root-dir}))
(= url "/v1/events")
(= request-path "/v1/events")
(sse-handler req res)
(= url "/v1/invoke")
(= request-path "/v1/import-db-binary")
(if (= method "POST")
(let [repo (.get (.-searchParams parsed-url) "repo")
method-kw :thread-api/import-db-binary
method-str (normalize-method-str method-kw)]
(-> (p/let [binary (<read-body-buffer req)
args-for-validation [repo binary]]
(if-let [{:keys [status error]} (repo-error method-kw args-for-validation bound-repo)]
(send-json! res status {:ok false :error error})
(p/let [_ (when (contains? write-methods method-kw)
(let [{:keys [path lock]} @*lock-info]
(db-lock/assert-lock-owner! path lock)))
result (<invoke-binary! proxy method-str method-kw repo binary)]
(send-json! res 200 {:ok true :resultTransit (ldb/write-transit-str result)}))))
(p/catch (fn [error]
(let [data (ex-data error)
status (invoke-error-status data)
code (invoke-error-code data)
message (invoke-error-message error data)
payload {:ok false
:error {:code code
:message message}}]
(log/error :db-worker-node-invoke-failed
{:status status
:code code
:method method-str
:error error})
(send-json! res status payload))))))
(send-text! res 405 "method-not-allowed"))
(= request-path "/v1/invoke")
(if (= method "POST")
(-> (p/let [body (<read-body req)
payload (js/JSON.parse body)

View File

@@ -620,6 +620,7 @@
:export/view-nodes-data-copied "Copied view nodes' data!"
:export/zip "Export both SQLite DB and assets"
:export/zip-error "Export zip failed."
:export/zip-exported "ZIP exported to {1}."
:export.backup/backup-now "Backup now"
:export.backup/cancel "Cancel backup"

View File

@@ -617,6 +617,7 @@
:export/view-nodes-data-copied "已复制视图节点数据!"
:export/zip "导出 SQLite 数据库和资源"
:export/zip-error "导出 ZIP 失败。"
:export/zip-exported "ZIP 已导出到 {1}。"
:export.backup/backup-now "立即备份"
:export.backup/cancel "取消备份"

View File

@@ -1,6 +1,9 @@
(ns frontend.handler.assets-test
(:require [cljs.test :refer [deftest is]]
[frontend.handler.assets :as assets]))
(:require [cljs.test :refer [async deftest is]]
[frontend.config :as config]
[frontend.fs :as fs]
[frontend.handler.assets :as assets]
[promesa.core :as p]))
(defn- uint8->vec
[^js payload]
@@ -26,3 +29,56 @@
(is (instance? js/Uint8Array output))
(is (= [10 11 12] (uint8->vec output)))))
;; ->uint8 accepts a Clojure map shaped like a serialized Node Buffer
;; ({"type" "Buffer" "data" [...]}).
(deftest coerce-buffer-like-map-to-uint8-test
  (let [buffer-like {"type" "Buffer"
                     "data" [13 14 15]}
        output (#'assets/->uint8 buffer-like)]
    (is (instance? js/Uint8Array output))
    (is (= [13 14 15] (uint8->vec output)))))

;; ->uint8 accepts a JS object tagged "Buffer" whose data is seq-like
;; rather than a JS array.
(deftest coerce-buffer-like-object-with-seq-data-to-uint8-test
  (let [buffer-like #js {:type "Buffer"
                         :data [16 17 18]}
        output (#'assets/->uint8 buffer-like)]
    (is (instance? js/Uint8Array output))
    (is (= [16 17 18] (uint8->vec output)))))

;; ->uint8 accepts a plain JS object keyed by numeric-string indices
;; (the shape a structured-cloned Uint8Array can take).
(deftest coerce-indexed-byte-object-to-uint8-test
  (let [buffer-like #js {"0" 19
                         "1" 20
                         "2" 21}
        output (#'assets/->uint8 buffer-like)]
    (is (instance? js/Uint8Array output))
    (is (= [19 20 21] (uint8->vec output)))))

;; ->uint8 accepts a Clojure map keyed by numeric-string indices.
(deftest coerce-indexed-byte-map-to-uint8-test
  (let [buffer-like {"0" 22
                     "1" 23
                     "2" 24}
        output (#'assets/->uint8 buffer-like)]
    (is (instance? js/Uint8Array output))
    (is (= [22 23 24] (uint8->vec output)))))
;; <get-all-assets must stat the assets dir first and resolve to [] without
;; ever calling readdir when the directory is missing (stat rejects).
(deftest get-all-assets-does-not-readdir-missing-assets-dir
  (async done
    (let [readdir-calls (atom 0)
          original-assets-root config/get-current-repo-assets-root
          original-stat fs/stat
          original-readdir fs/readdir]
      ;; Point the handler at a fake repo whose assets dir does not exist.
      (set! config/get-current-repo-assets-root (constantly "/tmp/graph/assets"))
      (set! fs/stat (fn [path]
                      (is (= "/tmp/graph/assets" path))
                      (p/rejected (js/Error. "ENOENT"))))
      (set! fs/readdir (fn [& _args]
                         (swap! readdir-calls inc)
                         (p/rejected (js/Error. "readdir should not be called"))))
      (-> (p/let [result (assets/<get-all-assets)]
            (is (= [] result))
            (is (zero? @readdir-calls)))
          (p/catch (fn [e]
                     (is false (str "unexpected error: " e))))
          ;; Always restore the patched vars, pass or fail.
          (p/finally (fn []
                       (set! config/get-current-repo-assets-root original-assets-root)
                       (set! fs/stat original-stat)
                       (set! fs/readdir original-readdir)
                       (done)))))))

View File

@@ -2,6 +2,9 @@
(:require [cljs.test :refer [are async deftest is testing use-fixtures]]
[clojure.string :as string]
[electron.ipc :as ipc]
[frontend.config :as config]
[frontend.fs :as fs]
[frontend.handler.assets :as assets-handler]
[frontend.handler.export :as export]
[frontend.handler.export.text :as export-text]
[frontend.handler.notification :as notification]
@@ -173,6 +176,71 @@
(set! (.-showSaveFilePicker js/window) original-picker)
(done)))))))
;; On Electron the zip export must be written to <repo>/export on disk (via
;; window.apis.writeFileBytes) and surface the written path in a success
;; notification, instead of triggering a browser download.
(deftest export-zip-on-electron-writes-export-file-and-notifies-path
  (async done
    (let [mkdir-calls (atom [])
          writes (atom [])
          notification-calls (atom [])
          original-electron? util/electron?
          original-time-ms util/time-ms
          original-get-repo-dir config/get-repo-dir
          original-mkdir-if-not-exists fs/mkdir-if-not-exists
          original-apis (.-apis js/window)
          original-export-db persist-db/<export-db
          original-invoke-db-worker state/<invoke-db-worker
          original-get-all-assets assets-handler/<get-all-assets
          original-notification-show! notification/show!]
      (set! util/electron? (constantly true))
      ;; Freeze time so the generated zip file name is deterministic.
      (set! util/time-ms (constantly 123000))
      (set! config/get-repo-dir (fn [repo]
                                  (is (= "logseq_db_big_graph" repo))
                                  "/tmp/logseq/graphs/logseq_db_big_graph"))
      (set! fs/mkdir-if-not-exists (fn [path]
                                     (swap! mkdir-calls conj path)
                                     (p/resolved nil)))
      (set! (.-apis js/window)
            #js {:writeFileBytes (fn [path content]
                                   (swap! writes conj [path content])
                                   (p/resolved nil))})
      ;; Desktop export must use the db-worker binary API, not persist-db.
      (set! persist-db/<export-db
            (fn [& _args]
              (p/rejected (ex-info "desktop zip export should read binary from db worker" {}))))
      (set! state/<invoke-db-worker
            (fn [qkw repo]
              (is (= :thread-api/export-db-binary qkw))
              (is (= "logseq_db_big_graph" repo))
              (p/resolved {:type "Buffer"
                           :data [1 2 3]})))
      (set! assets-handler/<get-all-assets
            (fn []
              (p/resolved [["assets/a.bin" {:type "Buffer"
                                            :data [4 5 6]}]])))
      (set! notification/show! (fn [& args]
                                 (swap! notification-calls conj args)))
      (-> (export/db-based-export-repo-as-zip! "logseq_db_big_graph")
          (p/then (fn [_]
                    (let [expected-path "/tmp/logseq/graphs/logseq_db_big_graph/export/big_graph_123.zip"]
                      (is (= ["/tmp/logseq/graphs/logseq_db_big_graph/export"]
                             @mkdir-calls))
                      (is (= expected-path (ffirst @writes)))
                      (is (instance? js/ArrayBuffer (second (first @writes))))
                      (is (= [[(str "ZIP exported to " expected-path ".") :success false]]
                             @notification-calls)))))
          (p/catch (fn [e]
                     (is false (str "unexpected error: " e))))
          ;; Restore everything patched above, pass or fail.
          (p/finally
            (fn []
              (set! util/electron? original-electron?)
              (set! util/time-ms original-time-ms)
              (set! config/get-repo-dir original-get-repo-dir)
              (set! fs/mkdir-if-not-exists original-mkdir-if-not-exists)
              (set! (.-apis js/window) original-apis)
              (set! persist-db/<export-db original-export-db)
              (set! state/<invoke-db-worker original-invoke-db-worker)
              (set! assets-handler/<get-all-assets original-get-all-assets)
              (set! notification/show! original-notification-show!)
              (done)))))))
(deftest export-blocks-as-markdown-without-properties
(are [expect block-uuid-s]
(= expect

View File

@@ -183,18 +183,55 @@
(is false (str "unexpected error: " e))))
(p/finally (fn [] (done)))))))
(deftest remote-import-db-uses-binary-thread-api
(deftest remote-import-db-uses-raw-binary-endpoint
(async done
(let [calls (atom [])
client {:base-url "http://127.0.0.1:9101"}
(let [captured (atom nil)
client (remote/create-client
{:base-url "http://127.0.0.1:9101"
:fetch-fn (fn [req]
(reset! captured req)
(p/resolved {:status 200
:body (js/JSON.stringify
#js {:ok true
:resultTransit (ldb/write-transit-str nil)})}))})
db (remote/->InRemote client nil nil)
payload (.from js/Buffer "sqlite-bytes")]
(-> (p/with-redefs [remote/invoke! (fn [client' method args]
(swap! calls conj [client' method args])
(p/resolved nil))]
(p/let [_ (protocol/<import-db db "graph-a" payload)]
(is (= [client "thread-api/import-db-binary" ["graph-a" payload]]
(first @calls)))))
payload (js/Uint8Array. #js [1 2 3])]
(-> (p/let [_ (protocol/<import-db db "graph-a" payload)]
(is (= "POST" (:method @captured)))
(is (= "http://127.0.0.1:9101/v1/import-db-binary?repo=graph-a"
(:url @captured)))
(is (identical? payload (:body @captured))))
(p/catch (fn [e]
(is false (str "unexpected error: " e))))
(p/finally (fn [] (done)))))))
;; The raw-binary import endpoint must receive the payload object untouched,
;; with octet-stream content type and the bearer token from the client config.
(deftest remote-import-db-posts-raw-binary-body
  (async done
    (let [captured (atom nil)
          client (remote/create-client
                  {:base-url "http://127.0.0.1:9101"
                   :auth-token "token-1"
                   :fetch-fn (fn [req]
                               (reset! captured req)
                               (p/resolved {:status 200
                                            :body (js/JSON.stringify
                                                   #js {:ok true
                                                        :resultTransit (ldb/write-transit-str nil)})}))})
          db (remote/->InRemote client nil nil)
          payload (js/ArrayBuffer. 3)
          view (js/Uint8Array. payload)]
      ;; Fill the buffer through the view; the ArrayBuffer itself is posted.
      (aset view 0 1)
      (aset view 1 2)
      (aset view 2 3)
      (-> (p/let [_ (protocol/<import-db db "graph-a" payload)]
            (is (= "POST" (:method @captured)))
            (is (= "http://127.0.0.1:9101/v1/import-db-binary?repo=graph-a"
                   (:url @captured)))
            (is (= "application/octet-stream"
                   (get-in @captured [:headers "Content-Type"])))
            (is (= "Bearer token-1"
                   (get-in @captured [:headers "Authorization"])))
            ;; The body must be the exact same object, not a copy/encoding.
            (is (identical? payload (:body @captured))))
          (p/catch (fn [e]
                     (is false (str "unexpected error: " e))))
          (p/finally (fn [] (done)))))))

View File

@@ -88,6 +88,15 @@
:headers {"Content-Type" "application/json"}}
payload)))
;; POST raw SQLite bytes to the daemon's /v1/import-db-binary endpoint for
;; `repo` with an octet-stream content type (no transit encoding on the way
;; in).  Returns whatever http-request resolves to for the response.
(defn- invoke-import-db-binary-raw
  [host port repo payload]
  (http-request {:hostname host
                 :port port
                 :path (str "/v1/import-db-binary?repo=" (js/encodeURIComponent repo))
                 :method "POST"
                 :headers {"Content-Type" "application/octet-stream"}}
                payload))
(defn- lock-path
[root-dir repo]
(db-lock/lock-path root-dir repo))
@@ -982,6 +991,70 @@
:else
(done)))))))))
;; End-to-end: export a repo's SQLite binary from one daemon, POST the raw
;; bytes to a second daemon's /v1/import-db-binary endpoint, then verify the
;; imported graph answers a query about the originally transacted data.
(deftest db-worker-node-import-db-binary-accepts-raw-request-body
  (async done
    (let [daemon-a (atom nil)
          daemon-b (atom nil)
          data-dir (node-helper/create-tmp-dir "db-worker-import-sqlite-raw")
          repo-a (str "logseq_db_import_sqlite_raw_a_" (subs (str (random-uuid)) 0 8))
          repo-b (str "logseq_db_import_sqlite_raw_b_" (subs (str (random-uuid)) 0 8))
          now (js/Date.now)
          page-uuid (random-uuid)]
      (-> (p/let [{:keys [host port stop!]}
                  (start-daemon! {:root-dir data-dir
                                  :repo repo-a})
                  _ (reset! daemon-a {:stop! stop!})
                  _ (invoke host port "thread-api/create-or-open-db" [repo-a {}])
                  ;; Seed repo-a with one page so the import can be verified.
                  _ (invoke host port "thread-api/transact"
                            [repo-a
                             [{:block/uuid page-uuid
                               :block/title "Raw SQLite Import Page"
                               :block/name "raw-sqlite-import-page"
                               :block/tags #{:logseq.class/Page}
                               :block/created-at now
                               :block/updated-at now}]
                             {}
                             nil])
                  export-binary (invoke host port "thread-api/export-db-binary" [repo-a])]
            (is (instance? js/Uint8Array export-binary))
            (is (pos? (.-byteLength export-binary)))
            ;; Stop daemon A, boot daemon B, and import the raw bytes there.
            (p/let [_ ((:stop! @daemon-a))
                    {:keys [host port stop!]}
                    (start-daemon! {:root-dir data-dir
                                    :repo repo-b})
                    _ (reset! daemon-b {:stop! stop!})
                    {:keys [status body]} (invoke-import-db-binary-raw host port repo-b export-binary)
                    parsed (js->clj (js/JSON.parse body) :keywordize-keys true)
                    _ (invoke host port "thread-api/create-or-open-db" [repo-b {}])
                    result (invoke host port "thread-api/q"
                                   [repo-b
                                    ['[:find ?e
                                       :in $ ?title
                                       :where [?e :block/title ?title]]
                                     "Raw SQLite Import Page"]])]
              (is (= 200 status))
              (is (:ok parsed))
              (is (seq result))))
          (p/catch (fn [e]
                     (println "[db-worker-node-test] import-sqlite-raw error:" e)
                     (is false (str e))))
          ;; Tear down whichever daemons are still registered before done.
          (p/finally (fn []
                       (let [stop-a (:stop! @daemon-a)
                             stop-b (:stop! @daemon-b)]
                         (cond
                           (and stop-a stop-b)
                           (-> (stop-a)
                               (p/finally (fn [] (-> (stop-b) (p/finally (fn [] (done)))))))
                           stop-a
                           (-> (stop-a) (p/finally (fn [] (done))))
                           stop-b
                           (-> (stop-b) (p/finally (fn [] (done))))
                           :else
                           (done)))))))))
(deftest db-worker-node-export-client-ops-db-binary
(async done
(let [daemon (atom nil)