Merge remote-tracking branch 'origin/master' into refactor/vite-migration

Mega Yu committed 2026-04-30 11:28:01 +08:00
29 changed files with 1230 additions and 91 deletions

View File

@@ -40,10 +40,10 @@
object]}
:discouraged-var
{rum.core/use-effect! {:message "Use frontend.hooks/use-effect! instead" :level :info}
rum.core/use-memo {:message "Use frontend.hooks/use-memo instead" :level :info}
rum.core/use-layout-effect! {:message "Use frontend.hooks/use-layout-effect! instead" :level :info}
rum.core/use-callback {:message "Use frontend.hooks/use-callback instead" :level :info}}
{rum.core/use-effect! {:message "Use logseq.shui.hooks/use-effect! instead" :level :info}
rum.core/use-memo {:message "Use logseq.shui.hooks/use-memo instead" :level :info}
rum.core/use-layout-effect! {:message "Use logseq.shui.hooks/use-layout-effect! instead" :level :info}
rum.core/use-callback {:message "Use logseq.shui.hooks/use-callback instead" :level :info}}
:unused-namespace {:level :warning
:exclude [logseq.db.common.entity-plus]}
@@ -70,16 +70,14 @@
electron.ipc ipc
electron.utils utils
frontend.commands commands
frontend.common.date common-date
frontend.common.missionary-util c.m
frontend.common.schema-register sr
frontend.common.idb idb
frontend.common.missionary c.m
frontend.common.search-fuzzy fuzzy
frontend.components.block.macros block-macros
frontend.components.class class-component
frontend.components.property property-component
frontend.components.query query
frontend.components.query.result query-result
frontend.components.title title
frontend.config config
frontend.date date
frontend.db db
@@ -88,9 +86,8 @@
frontend.db.query-dsl query-dsl
frontend.db.query-react query-react
frontend.db.react react
frontend.db.util db-utils
frontend.db.utils db-utils
frontend.diff diff
frontend.encrypt encrypt
frontend.extensions.sci sci
frontend.format.block block
frontend.format.mldoc mldoc
@@ -106,15 +103,12 @@
frontend.handler.db-based.page db-page-handler
frontend.handler.db-based.property db-property-handler
frontend.handler.db-based.property.util db-pu
frontend.handler.editor.property editor-property
frontend.handler.events events
frontend.handler.extract extract
frontend.handler.global-config global-config-handler
frontend.handler.notification notification
frontend.handler.page page-handler
frontend.handler.plugin plugin-handler
frontend.handler.plugin-config plugin-config-handler
frontend.handler.property.file property-file
frontend.handler.property.util pu
frontend.handler.query.builder query-builder
frontend.handler.repo repo-handler
@@ -122,8 +116,6 @@
frontend.handler.route route-handler
frontend.handler.search search-handler
frontend.handler.ui ui-handler
frontend.hooks hooks
frontend.idb idb
frontend.loader loader
frontend.mixins mixins
frontend.mobile.util mobile-util
@@ -138,17 +130,15 @@
frontend.ui ui
frontend.util util
frontend.util.page page-util
frontend.util.persist-var persist-var
frontend.util.property property
frontend.util.text text-util
frontend.util.thingatpt thingatpt
frontend.util.url url-util
frontend.util.ref ref
frontend.worker-common.util worker-util
frontend.worker.shared-service shared-service
frontend.worker.handler.page worker-page
frontend.worker.pipeline worker-pipeline
frontend.worker.state worker-state
frontend.worker.util worker-util
lambdaisland.glogi log
logseq.api.db-based db-based-api
logseq.cli.common.graph cli-common-graph
@@ -159,11 +149,12 @@
logseq.cli.common.mcp.tools cli-common-mcp-tools
logseq.cli.text-util cli-text-util
logseq.common.config common-config
logseq.common.date-time-util date-time-util
logseq.common.date common-date
logseq.common.graph common-graph
logseq.common.path path
logseq.common.util common-util
logseq.common.util.block-ref block-ref
logseq.common.util.date-time date-time-util
logseq.common.util.macro macro-util
logseq.common.util.namespace ns-util
logseq.common.util.page-ref page-ref
@@ -195,7 +186,6 @@
logseq.graph-parser.mldoc gp-mldoc
logseq.graph-parser.property gp-property
logseq.graph-parser.text text
logseq.outliner.batch-tx batch-tx
logseq.outliner.core outliner-core
logseq.outliner.datascript-report ds-report
logseq.outliner.op outliner-op
@@ -203,6 +193,7 @@
logseq.outliner.pipeline outliner-pipeline
logseq.outliner.tree otree
logseq.outliner.validate outliner-validate
logseq.shui.hooks hooks
logseq.shui.popup.core shui-popup
logseq.shui.ui shui
medley.core medley

View File

@@ -36,7 +36,45 @@ env:
BABASHKA_VERSION: '1.12.215'
jobs:
test-release:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up pnpm
uses: pnpm/action-setup@v4
with:
version: 10.33.0
- name: Set up Node
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'pnpm'
cache-dependency-path: deps/publish/pnpm-lock.yaml
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: ${{ env.JAVA_VERSION }}
# Clojure needed for test build step
- name: Set up Clojure
uses: DeLaGuardo/setup-clojure@13.5
with:
cli: ${{ env.CLOJURE_VERSION }}
bb: ${{ env.BABASHKA_VERSION }}
- name: Fetch pnpm deps
run: pnpm install --frozen-lockfile
- name: Run publish unit tests
run: pnpm test
build-release:
runs-on: ubuntu-latest
steps:
@@ -94,7 +132,7 @@ jobs:
bb: ${{ env.BABASHKA_VERSION }}
- name: Run clj-kondo lint
run: clojure -M:clj-kondo --lint src
run: clojure -M:clj-kondo --lint src test
- name: Carve lint for unused vars
run: bb lint:carve

View File

@@ -15,6 +15,9 @@
-/resources/static/js/react-force-graph.min.js
-/resources/js/lsplugin.user.js
-/resources/js/pdf_viewer2.js
-/deps/db-sync/test/logseq/db_sync/fixtures/*.edn
-/src/test/fixtures/*.transit
-/src/test/migration/*.transit
-/deps/graph-parser/test/resources/
-/ios/App/App/public
-/android/

View File

@@ -7,11 +7,12 @@
[logseq.common.util :as common-util]))
(def ^:private yyyyMMdd-formatter (tf/formatter "yyyyMMdd"))
(def ^:api default-journal-title-formatter "MMM do, yyyy")
;; (tf/parse (tf/formatter "dd.MM.yyyy") "20.01.2004") => 20040120T000000
(defn safe-journal-title-formatters
[date-formatter]
(->> [date-formatter "MMM do, yyyy" "yyyy-MM-dd" "yyyy_MM_dd"]
(->> [date-formatter default-journal-title-formatter "yyyy-MM-dd" "yyyy_MM_dd"]
(remove string/blank?)
distinct))
@@ -106,4 +107,4 @@
tc/from-long
t/to-default-time-zone
(tf/unparse yyyyMMdd-formatter)
parse-long))
parse-long))
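A note on the safe-journal-title-formatters change above: the default journal formatter is now always part of the fallback list, deduplicated via distinct. An illustrative REPL sketch (not part of the diff):
;; custom formatter first, then the default, then the ISO-style fallbacks
(safe-journal-title-formatters "dd.MM.yyyy")
;; => ("dd.MM.yyyy" "MMM do, yyyy" "yyyy-MM-dd" "yyyy_MM_dd")
;; when the custom formatter is the default, distinct collapses the duplicate
(safe-journal-title-formatters "MMM do, yyyy")
;; => ("MMM do, yyyy" "yyyy-MM-dd" "yyyy_MM_dd")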

View File

@@ -534,6 +534,16 @@
[value]
(contains? #{"true" "1"} value))
(defn- sqlite-too-big-error?
[error]
(let [message (-> (or (ex-message error)
(some-> error .-message)
(str error))
string/lower-case)]
(or (string/includes? message "sqlite_toobig")
(string/includes? message "string or blob too big")
(string/includes? message "statement too long"))))
(defn- handle-sync-snapshot-upload
[^js self request url]
(let [graph-id (graph-id-from-request request)
@@ -556,22 +566,27 @@
(if (and (= encoding snapshot-content-encoding)
(not (exists? js/DecompressionStream)))
(http/error-response "gzip not supported" 500)
(p/let [_ (ensure-schema! self)
_ (when reset?
(storage/set-meta! (.-sql self) snapshot-uploading-meta-key true))
_ (when reset?
(<set-graph-ready-for-use! self graph-id false))
stream (maybe-decompress-stream stream encoding)
count (import-snapshot-stream! self stream reset?)
_ (when finished?
(storage/set-meta! (.-sql self) snapshot-uploading-meta-key false))
_ (when finished?
(when (seq checksum-param)
(storage/set-checksum! (.-sql self) checksum-param)))
_ (when finished?
(<set-graph-ready-for-use! self graph-id true))]
(http/json-response :sync/snapshot-upload {:ok true
:count count})))))))
(p/catch
(p/let [_ (ensure-schema! self)
_ (when reset?
(storage/set-meta! (.-sql self) snapshot-uploading-meta-key true))
_ (when reset?
(<set-graph-ready-for-use! self graph-id false))
stream (maybe-decompress-stream stream encoding)
count (import-snapshot-stream! self stream reset?)
_ (when finished?
(storage/set-meta! (.-sql self) snapshot-uploading-meta-key false))
_ (when finished?
(when (seq checksum-param)
(storage/set-checksum! (.-sql self) checksum-param)))
_ (when finished?
(<set-graph-ready-for-use! self graph-id true))]
(http/json-response :sync/snapshot-upload {:ok true
:count count}))
(fn [error]
(if (sqlite-too-big-error? error)
(http/error-response "snapshot row too large" 413)
(throw error)))))))))
(defn handle [{:keys [^js self request url route]}]
(case (:handler route)
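A note on the new error classifier above: it matches against the lower-cased SQLite message text, so both the SQLITE_TOOBIG code and its prose variants are caught. Illustrative REPL checks (not part of the diff):
(sqlite-too-big-error? (js/Error. "string or blob too big: SQLITE_TOOBIG")) ;; => true
(sqlite-too-big-error? (ex-info "disk I/O error" {}))                       ;; => false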

View File

@@ -611,6 +611,35 @@
(is false (str error))
(done)))))))
(deftest snapshot-upload-returns-413-when-sqlite-row-is-too-large-test
(async done
(let [sql (test-sql/make-sql)
conn (d/create-conn db-schema/schema)
self #js {:sql sql
:conn conn
:schema-ready true
:env #js {"DB" nil}}
request (js/Request. "http://localhost/sync/graph-1/snapshot/upload?graph-id=graph-1&finished=true"
#js {:method "POST"
:body (js/Uint8Array. 0)})]
(-> (p/with-redefs [sync-handler/import-snapshot-stream! (fn [_self _stream _reset?]
(p/rejected (js/Error. "string or blob too big: SQLITE_TOOBIG")))
sync-handler/<set-graph-ready-for-use! (fn [_self _graph-id _graph-ready-for-use?]
(p/resolved true))]
(p/let [resp (sync-handler/handle {:self self
:request request
:url (js/URL. (.-url request))
:route {:handler :sync/snapshot-upload}})
text (.text resp)
body (js->clj (js/JSON.parse text) :keywordize-keys true)]
(is (= 413 (.-status resp)))
(is (= {:error "snapshot row too large"} body))))
(p/then (fn []
(done)))
(p/catch (fn [error]
(is false (str error))
(done)))))))
(deftest tx-batch-rejects-when-a-tx-entry-fails-test
(testing "db transact failure rejects the batch"
(let [sql (test-sql/make-sql)

View File

@@ -85,7 +85,7 @@
(merge (select-keys pvalue [:block/created-at :block/updated-at])))
property-value-content')))
(defonce ignored-properties [:logseq.property/created-by-ref])
(defonce ignored-properties [:logseq.property/created-by-ref :logseq.property.embedding/hnsw-label-updated-at])
;; buildable-properties and build-blocks-export depend on each other
(declare build-blocks-export)

View File

@@ -297,8 +297,9 @@
;; for page names to change which breaks looking up journal refs for unconfigured journal pages
(if export-to-db-graph? [date-formatter] (date-time-util/safe-journal-title-formatters date-formatter))))]
(if day
(let [original-page-name' (date-time-util/int->journal-title day date-formatter)]
[original-page-name' (common-util/page-name-sanity-lc original-page-name') day])
(let [original-page-name' (date-time-util/int->journal-title day date-formatter)
default-journal-page-name (date-time-util/int->journal-title day date-time-util/default-journal-title-formatter)]
[original-page-name' (common-util/page-name-sanity-lc default-journal-page-name) day])
[original-page-name page-name day]))))
(def convert-page-if-journal (memoize convert-page-if-journal-impl))
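With this change a journal page's display title and its sanitized :block/name can diverge: the title follows the configured formatter, while the name is always derived from the default formatter. A worked example, assuming a custom title format of "yyyy-MM-dd EEEE" and journal day 20241216:
;; original-page-name'       => "2024-12-16 Monday"  (configured formatter)
;; default-journal-page-name => "Dec 16th, 2024"     (default "MMM do, yyyy")
;; returned tuple            => ["2024-12-16 Monday" "dec 16th, 2024" 20241216]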

View File

@@ -2,7 +2,6 @@
:api-namespaces [logseq.outliner.datascript-report
logseq.outliner.pipeline
logseq.outliner.cli
logseq.outliner.batch-tx
logseq.outliner.core
logseq.outliner.db-pipeline
logseq.outliner.property

View File

@@ -6,6 +6,7 @@
[datascript.core :as d]
[datascript.impl.entity :as de :refer [Entity]]
[logseq.common.util :as common-util]
[logseq.common.util.date-time :as date-time-util]
[logseq.common.util.page-ref :as page-ref]
[logseq.common.uuid :as common-uuid]
[logseq.db :as ldb]
@@ -300,7 +301,10 @@
(outliner-validate/validate-page-title-characters block-title {:node m*}))
m (if page-title-changed?
(let [_ (outliner-validate/validate-page-title (:block/title m*) {:node m*})
page-name (common-util/page-name-sanity-lc (:block/title m*))]
page-name (if-let [journal-day (:block/journal-day block-entity)]
(common-util/page-name-sanity-lc
(date-time-util/int->journal-title journal-day date-time-util/default-journal-title-formatter))
(common-util/page-name-sanity-lc (:block/title m*)))]
(assoc m* :block/name page-name))
m*)
_ (when (and ;; page or object changed?

View File

@@ -2,6 +2,8 @@
(:require [cljs.test :refer [deftest is testing]]
[datascript.core :as d]
[logseq.common.config :as common-config]
[logseq.common.util :as common-util]
[logseq.common.util.date-time :as date-time-util]
[logseq.common.util.page-ref :as page-ref]
[logseq.db :as ldb]
[logseq.db.common.order :as db-order]
@@ -159,3 +161,16 @@
:block/tags
(map #(:db/ident (d/entity @conn (:db/id %))))))
"New journal only has Journal tag")))
(deftest create-journal-keeps-default-block-name-with-custom-title-format
(let [conn (db-test/create-conn)
_ (d/transact! conn [[:db/add :logseq.class/Journal :logseq.property.journal/title-format "yyyy-MM-dd EEEE"]])
[_ page-uuid] (outliner-page/create! conn "Dec 16th, 2024" {})
page (d/entity @conn [:block/uuid page-uuid])
default-name (-> (:block/journal-day page)
(date-time-util/int->journal-title date-time-util/default-journal-title-formatter)
common-util/page-name-sanity-lc)]
(is (= "2024-12-16 Monday" (:block/title page))
"Journal title follows configured formatter")
(is (= default-name (:block/name page))
"Journal block/name remains the default formatter, independent of title format")))

View File

@@ -7,11 +7,13 @@
"dev": "cd ./worker && pnpm exec wrangler dev",
"watch": "clojure -M:cljs watch publish-worker",
"release": "clojure -M:cljs release publish-worker",
"test": "clojure -M:cljs compile publish-test && node worker/dist/worker-test.js",
"clean": "rm -rf ./worker/dist/",
"bump-publish-version": "node ./scripts/bump-publish-version.js",
"deploy": "pnpm bump-publish-version && pnpm clean && pnpm release && cd ./worker && pnpm exec wrangler deploy --env prod"
},
"dependencies": {
"mldoc": "^1.5.9",
"shadow-cljs": "^3.4.4"
}
}

deps/publish/pnpm-lock.yaml (412 changes, generated, vendored)
View File

@@ -8,12 +8,23 @@ importers:
.:
dependencies:
mldoc:
specifier: ^1.5.9
version: 1.5.9
shadow-cljs:
specifier: ^3.4.4
version: 3.4.4
packages:
ansi-regex@2.1.1:
resolution: {integrity: sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==}
engines: {node: '>=0.10.0'}
ansi-regex@3.0.1:
resolution: {integrity: sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==}
engines: {node: '>=4'}
base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
@@ -23,21 +34,168 @@ packages:
buffer@6.0.3:
resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==}
camelcase@5.3.1:
resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==}
engines: {node: '>=6'}
cliui@4.1.0:
resolution: {integrity: sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==}
code-point-at@1.1.0:
resolution: {integrity: sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==}
engines: {node: '>=0.10.0'}
cross-spawn@6.0.6:
resolution: {integrity: sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==}
engines: {node: '>=4.8'}
decamelize@1.2.0:
resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==}
engines: {node: '>=0.10.0'}
end-of-stream@1.4.5:
resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==}
execa@1.0.0:
resolution: {integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==}
engines: {node: '>=6'}
find-up@3.0.0:
resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==}
engines: {node: '>=6'}
get-caller-file@1.0.3:
resolution: {integrity: sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==}
get-stream@4.1.0:
resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==}
engines: {node: '>=6'}
ieee754@1.2.1:
resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
invert-kv@2.0.0:
resolution: {integrity: sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==}
engines: {node: '>=4'}
is-fullwidth-code-point@1.0.0:
resolution: {integrity: sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@2.0.0:
resolution: {integrity: sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==}
engines: {node: '>=4'}
is-stream@1.1.0:
resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==}
engines: {node: '>=0.10.0'}
isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
isexe@3.1.1:
resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
engines: {node: '>=16'}
lcid@2.0.0:
resolution: {integrity: sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==}
engines: {node: '>=6'}
locate-path@3.0.0:
resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==}
engines: {node: '>=6'}
map-age-cleaner@0.1.3:
resolution: {integrity: sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==}
engines: {node: '>=6'}
mem@4.3.0:
resolution: {integrity: sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==}
engines: {node: '>=6'}
mimic-fn@2.1.0:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'}
mldoc@1.5.9:
resolution: {integrity: sha512-87FQ7hseS87tsk+VdpIigpu8LH+GwmbbFgpxgFwvnbH5oOjmIrc47laH4Dyggzqiy8/vMjDHkl7vsId0eXhCDQ==}
hasBin: true
nice-try@1.0.5:
resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==}
npm-run-path@2.0.2:
resolution: {integrity: sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==}
engines: {node: '>=4'}
number-is-nan@1.0.1:
resolution: {integrity: sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==}
engines: {node: '>=0.10.0'}
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
os-locale@3.1.0:
resolution: {integrity: sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==}
engines: {node: '>=6'}
p-defer@1.0.0:
resolution: {integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==}
engines: {node: '>=4'}
p-finally@1.0.0:
resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==}
engines: {node: '>=4'}
p-is-promise@2.1.0:
resolution: {integrity: sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==}
engines: {node: '>=6'}
p-limit@2.3.0:
resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
engines: {node: '>=6'}
p-locate@3.0.0:
resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==}
engines: {node: '>=6'}
p-try@2.2.0:
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
engines: {node: '>=6'}
path-exists@3.0.0:
resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==}
engines: {node: '>=4'}
path-key@2.0.1:
resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==}
engines: {node: '>=4'}
process@0.11.10:
resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
engines: {node: '>= 0.6.0'}
pump@3.0.4:
resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==}
readline-sync@1.4.10:
resolution: {integrity: sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==}
engines: {node: '>= 0.8.0'}
require-directory@2.1.1:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
require-main-filename@1.0.1:
resolution: {integrity: sha512-IqSUtOVP4ksd1C/ej5zeEh/BIP2ajqpn8c5x+q99gvcIG/Qf0cud5raVnE/Dwd0ua9TXYDoDc0RE5hBSdz22Ug==}
semver@5.7.2:
resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==}
hasBin: true
set-blocking@2.0.0:
resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==}
shadow-cljs-jar@1.3.4:
resolution: {integrity: sha512-cZB2pzVXBnhpJ6PQdsjO+j/MksR28mv4QD/hP/2y1fsIa9Z9RutYgh3N34FZ8Ktl4puAXaIGlct+gMCJ5BmwmA==}
@@ -46,6 +204,17 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
shebang-command@1.2.0:
resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==}
engines: {node: '>=0.10.0'}
shebang-regex@1.0.0:
resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==}
engines: {node: '>=0.10.0'}
signal-exit@3.0.7:
resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
@@ -53,11 +222,45 @@ packages:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
string-width@1.0.2:
resolution: {integrity: sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==}
engines: {node: '>=0.10.0'}
string-width@2.1.1:
resolution: {integrity: sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==}
engines: {node: '>=4'}
strip-ansi@3.0.1:
resolution: {integrity: sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==}
engines: {node: '>=0.10.0'}
strip-ansi@4.0.0:
resolution: {integrity: sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==}
engines: {node: '>=4'}
strip-eof@1.0.0:
resolution: {integrity: sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==}
engines: {node: '>=0.10.0'}
which-module@2.0.1:
resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==}
which@1.3.1:
resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==}
hasBin: true
which@5.0.0:
resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==}
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
wrap-ansi@2.1.0:
resolution: {integrity: sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw==}
engines: {node: '>=0.10.0'}
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
@@ -70,8 +273,21 @@ packages:
utf-8-validate:
optional: true
y18n@4.0.3:
resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==}
yargs-parser@11.1.1:
resolution: {integrity: sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==}
yargs@12.0.5:
resolution: {integrity: sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==}
snapshots:
ansi-regex@2.1.1: {}
ansi-regex@3.0.1: {}
base64-js@1.5.1: {}
buffer-from@1.1.2: {}
@@ -81,14 +297,146 @@ snapshots:
base64-js: 1.5.1
ieee754: 1.2.1
camelcase@5.3.1: {}
cliui@4.1.0:
dependencies:
string-width: 2.1.1
strip-ansi: 4.0.0
wrap-ansi: 2.1.0
code-point-at@1.1.0: {}
cross-spawn@6.0.6:
dependencies:
nice-try: 1.0.5
path-key: 2.0.1
semver: 5.7.2
shebang-command: 1.2.0
which: 1.3.1
decamelize@1.2.0: {}
end-of-stream@1.4.5:
dependencies:
once: 1.4.0
execa@1.0.0:
dependencies:
cross-spawn: 6.0.6
get-stream: 4.1.0
is-stream: 1.1.0
npm-run-path: 2.0.2
p-finally: 1.0.0
signal-exit: 3.0.7
strip-eof: 1.0.0
find-up@3.0.0:
dependencies:
locate-path: 3.0.0
get-caller-file@1.0.3: {}
get-stream@4.1.0:
dependencies:
pump: 3.0.4
ieee754@1.2.1: {}
invert-kv@2.0.0: {}
is-fullwidth-code-point@1.0.0:
dependencies:
number-is-nan: 1.0.1
is-fullwidth-code-point@2.0.0: {}
is-stream@1.1.0: {}
isexe@2.0.0: {}
isexe@3.1.1: {}
lcid@2.0.0:
dependencies:
invert-kv: 2.0.0
locate-path@3.0.0:
dependencies:
p-locate: 3.0.0
path-exists: 3.0.0
map-age-cleaner@0.1.3:
dependencies:
p-defer: 1.0.0
mem@4.3.0:
dependencies:
map-age-cleaner: 0.1.3
mimic-fn: 2.1.0
p-is-promise: 2.1.0
mimic-fn@2.1.0: {}
mldoc@1.5.9:
dependencies:
yargs: 12.0.5
nice-try@1.0.5: {}
npm-run-path@2.0.2:
dependencies:
path-key: 2.0.1
number-is-nan@1.0.1: {}
once@1.4.0:
dependencies:
wrappy: 1.0.2
os-locale@3.1.0:
dependencies:
execa: 1.0.0
lcid: 2.0.0
mem: 4.3.0
p-defer@1.0.0: {}
p-finally@1.0.0: {}
p-is-promise@2.1.0: {}
p-limit@2.3.0:
dependencies:
p-try: 2.2.0
p-locate@3.0.0:
dependencies:
p-limit: 2.3.0
p-try@2.2.0: {}
path-exists@3.0.0: {}
path-key@2.0.1: {}
process@0.11.10: {}
pump@3.0.4:
dependencies:
end-of-stream: 1.4.5
once: 1.4.0
readline-sync@1.4.10: {}
require-directory@2.1.1: {}
require-main-filename@1.0.1: {}
semver@5.7.2: {}
set-blocking@2.0.0: {}
shadow-cljs-jar@1.3.4: {}
shadow-cljs@3.4.4:
@@ -104,6 +452,14 @@ snapshots:
- bufferutil
- utf-8-validate
shebang-command@1.2.0:
dependencies:
shebang-regex: 1.0.0
shebang-regex@1.0.0: {}
signal-exit@3.0.7: {}
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
@@ -111,8 +467,64 @@ snapshots:
source-map@0.6.1: {}
string-width@1.0.2:
dependencies:
code-point-at: 1.1.0
is-fullwidth-code-point: 1.0.0
strip-ansi: 3.0.1
string-width@2.1.1:
dependencies:
is-fullwidth-code-point: 2.0.0
strip-ansi: 4.0.0
strip-ansi@3.0.1:
dependencies:
ansi-regex: 2.1.1
strip-ansi@4.0.0:
dependencies:
ansi-regex: 3.0.1
strip-eof@1.0.0: {}
which-module@2.0.1: {}
which@1.3.1:
dependencies:
isexe: 2.0.0
which@5.0.0:
dependencies:
isexe: 3.1.1
wrap-ansi@2.1.0:
dependencies:
string-width: 1.0.2
strip-ansi: 3.0.1
wrappy@1.0.2: {}
ws@8.18.3: {}
y18n@4.0.3: {}
yargs-parser@11.1.1:
dependencies:
camelcase: 5.3.1
decamelize: 1.2.0
yargs@12.0.5:
dependencies:
cliui: 4.1.0
decamelize: 1.2.0
find-up: 3.0.0
get-caller-file: 1.0.3
os-locale: 3.1.0
require-directory: 2.1.1
require-main-filename: 1.0.1
set-blocking: 2.0.0
string-width: 2.1.1
which-module: 2.0.1
y18n: 4.0.3
yargs-parser: 11.1.1

View File

@@ -0,0 +1,100 @@
(ns logseq.publish.common-test
(:require [cljs.test :refer [async deftest is testing]]
[logseq.publish.common :as publish-common]
[promesa.core :as p]))
(deftest merge-headers-overrides-and-preserves-values
(let [headers (publish-common/merge-headers #js {"a" "1" "keep" "ok"}
#js {"a" "2" "b" "3"})]
(is (= "2" (.get headers "a")))
(is (= "3" (.get headers "b")))
(is (= "ok" (.get headers "keep")))))
(deftest parse-meta-header-valid-and-invalid-json
(testing "valid json meta header is parsed into keywordized map"
(let [request (js/Request. "https://publish.example/pages"
#js {:headers #js {"x-publish-meta" "{\"content_hash\":\"h\",\"graph\":\"g\",\"page_uuid\":\"p\"}"}})
meta (publish-common/parse-meta-header request)]
(is (= "h" (:content_hash meta)))
(is (= "g" (:graph meta)))
(is (= "p" (:page_uuid meta)))))
(testing "invalid json returns nil"
(let [request (js/Request. "https://publish.example/pages"
#js {:headers #js {"x-publish-meta" "{not-json"}})]
(is (nil? (publish-common/parse-meta-header request))))))
(deftest valid-meta-requires-core-fields
(is (some? (publish-common/valid-meta? {:content_hash "h" :graph "g" :page_uuid "p"})))
(is (nil? (publish-common/valid-meta? {:content_hash "h" :graph "g"})))
(is (nil? (publish-common/valid-meta? nil))))
(deftest get-sql-rows-handles-supported-shapes
(is (= [] (publish-common/get-sql-rows nil)))
(is (= [{"a" 1}]
(js->clj (publish-common/get-sql-rows #js {:rows #js [#js {"a" 1}]})
:keywordize-keys false)))
(let [row (js-obj)]
(aset row "rows" #js [#js {"b" 2}])
(is (= [{"rows" [{"b" 2}]}]
(js->clj (publish-common/get-sql-rows #js [row])
:keywordize-keys false)))))
(deftest json-error-response-helpers-set-status
(async done
(-> (p/let [unauthorized (publish-common/unauthorized)
forbidden (publish-common/forbidden)
bad-request (publish-common/bad-request "bad")
not-found (publish-common/not-found)
unauthorized-body (.json unauthorized)
forbidden-body (.json forbidden)
bad-request-body (.json bad-request)
not-found-body (.json not-found)]
(is (= 401 (.-status unauthorized)))
(is (= 403 (.-status forbidden)))
(is (= 400 (.-status bad-request)))
(is (= 404 (.-status not-found)))
(is (= "unauthorized" (aget unauthorized-body "error")))
(is (= "forbidden" (aget forbidden-body "error")))
(is (= "bad" (aget bad-request-body "error")))
(is (= "not found" (aget not-found-body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest normalize-etag-removes-double-quotes
(is (= "abc" (publish-common/normalize-etag "\"abc\"")))
(is (= "abc" (publish-common/normalize-etag "abc")))
(is (nil? (publish-common/normalize-etag nil))))
(deftest encode-path-encodes-path-segments
(is (= "with%20space/plus%2Bsign" (publish-common/encode-path "with space/plus+sign"))))
(deftest short-id-for-page-is-deterministic-and-fixed-length
(async done
(-> (p/let [a (publish-common/short-id-for-page "graph-1" "page-1")
b (publish-common/short-id-for-page "graph-1" "page-1")
c (publish-common/short-id-for-page "graph-1" "page-2")]
(is (= 10 (count a)))
(is (= a b))
(is (not= a c))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest hash-and-verify-password-roundtrip
(async done
(-> (p/let [hashed (publish-common/hash-password "secret-value")
ok? (publish-common/verify-password "secret-value" hashed)
wrong? (publish-common/verify-password "wrong-value" hashed)]
(is (string? hashed))
(is (true? ok?))
(is (false? wrong?))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest verify-password-rejects-invalid-hash-format
(is (false? (publish-common/verify-password "secret" "not-a-valid-hash"))))

View File

@@ -15,3 +15,13 @@
ctx {:property-hidden-by-ident {}}
result (render/entity-properties entity ctx {})]
(is (nil? (get result :logseq.property/created-from-property))))))
(deftest filter-tags-removes-built-in-tag-values
(testing "built-in class keyword tags are removed"
(let [result (render/filter-tags [:logseq.class/Tag :user.property/custom] {})]
(is (= [:user.property/custom] result))))
(testing "built-in class entities are removed"
(let [entities {1 {:db/ident :logseq.class/Tag}
2 {:db/ident :user.property/custom}}
result (render/filter-tags [1 2] entities)]
(is (= [2] result)))))

View File

@@ -1,5 +1,7 @@
(ns logseq.publish.routes-test
(:require [cljs.test :refer [async deftest is testing]]
[logseq.common.authorization :as authorization]
[logseq.publish.common :as publish-common]
[logseq.publish.routes :as routes]
[promesa.core :as p]))
@@ -42,6 +44,37 @@
#js {"PUBLISH_META_DO" do-ns
"PUBLISH_R2" r2}))
(defn- empty-env []
#js {})
(defn- json-error-response
[status message]
(js/Response.
(js/JSON.stringify #js {"error" message})
#js {:status status
:headers #js {"content-type" "application/json"}}))
(defn- ok-json-response
[data]
(js/Response.
(js/JSON.stringify data)
#js {:status 200
:headers #js {"content-type" "application/json"}}))
(defn- method-from-opts
[opts]
(or (some-> opts (aget "method"))
"GET"))
(defn- permission-env
[route-dispatch]
(let [do-ns #js {:idFromName (fn [name] name)
:get (fn [id]
#js {:fetch (fn [url opts]
(js/Promise.resolve
(route-dispatch id url (method-from-opts opts))))})}]
#js {"PUBLISH_META_DO" do-ns}))
(deftest short-url-does-not-redirect-to-uuid-url
(testing "short URL should not redirect to /page/:graph/:page"
(async done
@@ -89,3 +122,272 @@
(p/catch (fn [error]
(is nil (str error))
(done))))))))
(deftest options-route-returns-cors-no-content
(async done
(let [request (js/Request. "https://publish.example/any"
#js {:method "OPTIONS"})]
(-> (p/let [response (routes/handle-fetch request (empty-env))]
(is (= 204 (.-status response)))
(is (= "*" (.get (.-headers response) "access-control-allow-origin")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest home-route-renders-html
(async done
(let [request (js/Request. "https://publish.example/")]
(-> (p/let [response (routes/handle-fetch request (empty-env))
body (.text response)]
(is (= 200 (.-status response)))
(is (re-find #"<!doctype html>" body))
(is (re-find #"Logseq Publish" body))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest static-assets-return-content-types
(async done
(-> (p/let [css-resp (routes/handle-fetch (js/Request. "https://publish.example/static/publish.css") (empty-env))
js-resp (routes/handle-fetch (js/Request. "https://publish.example/static/publish.js") (empty-env))
ext-resp (routes/handle-fetch (js/Request. "https://publish.example/static/tabler.ext.js") (empty-env))]
(is (= "text/css; charset=utf-8" (.get (.-headers css-resp) "content-type")))
(is (= "text/javascript; charset=utf-8" (.get (.-headers js-resp) "content-type")))
(is (= "text/javascript; charset=utf-8" (.get (.-headers ext-resp) "content-type")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest short-url-missing-id-returns-bad-request
(async done
(-> (p/let [response (routes/handle-fetch (js/Request. "https://publish.example/p/") (empty-env))
body (.json response)]
(is (= 400 (.-status response)))
(is (= "missing short id" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest short-url-not-found-returns-not-found
(async done
(let [do-stub #js {:fetch (fn [_url _opts]
(js/Promise.resolve (js/Response. "nope" #js {:status 404})))}
do-ns #js {:idFromName (fn [_name] "index")
:get (fn [_id] do-stub)}
env #js {"PUBLISH_META_DO" do-ns}]
(-> (p/let [response (routes/handle-fetch (js/Request. "https://publish.example/p/abc") env)
body (.json response)]
(is (= 404 (.-status response)))
(is (= "not found" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest asset-route-validates-missing-or-invalid-id
(async done
(-> (p/let [missing-id-resp (routes/handle-fetch (js/Request. "https://publish.example/asset//") (empty-env))
missing-id-body (.json missing-id-resp)
invalid-id-resp (routes/handle-fetch (js/Request. "https://publish.example/asset/g/noext") (empty-env))
invalid-id-body (.json invalid-id-resp)]
(is (= 400 (.-status missing-id-resp)))
(is (= "missing asset id" (aget missing-id-body "error")))
(is (= 400 (.-status invalid-id-resp)))
(is (= "invalid asset id" (aget invalid-id-body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest user-route-validates-missing-username
(async done
(-> (p/let [response (routes/handle-fetch (js/Request. "https://publish.example/u/") (empty-env))
body (.json response)]
(is (= 400 (.-status response)))
(is (= "missing username" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest post-pages-without-auth-is-unauthorized
(async done
(-> (p/let [request (js/Request. "https://publish.example/pages"
#js {:method "POST"
:headers #js {"content-type" "application/transit+json"}
:body "{}"})
response (routes/handle-fetch request (empty-env))
body (.json response)]
(is (= 401 (.-status response)))
(is (= "unauthorized" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest delete-page-without-auth-is-unauthorized
(async done
(-> (p/let [request (js/Request. "https://publish.example/pages/graph-1/page-1"
#js {:method "DELETE"})
response (routes/handle-fetch request (empty-env))
body (.json response)]
(is (= 401 (.-status response)))
(is (= "unauthorized" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done))))))
(deftest delete-page-owner-mismatch-is-forbidden
(async done
(let [env (permission-env
(fn [id url method]
(cond
(and (= id "index")
(= method "GET")
(= url "https://publish/pages/graph-1/page-1"))
(ok-json-response #js {"owner_sub" "owner-a"})
:else
(json-error-response 404 "not found"))))
request (js/Request. "https://publish.example/pages/graph-1/page-1"
#js {:method "DELETE"
:headers #js {"authorization" "Bearer token"}})]
(-> (p/let [response (p/with-redefs [authorization/verify-jwt (fn [_ _] #js {"sub" "owner-b"})]
(routes/handle-fetch request env))
body (.json response)]
(is (= 403 (.-status response)))
(is (= "forbidden" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest delete-page-owner-match-succeeds
(async done
(let [env (permission-env
(fn [id url method]
(cond
(and (= id "index")
(= method "GET")
(= url "https://publish/pages/graph-1/page-1"))
(ok-json-response #js {"owner_sub" "owner-a"})
(and (= id "index")
(= method "DELETE")
(= url "https://publish/pages/graph-1/page-1"))
(ok-json-response #js {"ok" true})
(and (= id "graph-1:page-1")
(= method "DELETE")
(= url "https://publish/pages/graph-1/page-1"))
(ok-json-response #js {"ok" true})
:else
(json-error-response 404 "not found"))))
request (js/Request. "https://publish.example/pages/graph-1/page-1"
#js {:method "DELETE"
:headers #js {"authorization" "Bearer token"}})]
(-> (p/let [response (p/with-redefs [authorization/verify-jwt (fn [_ _] #js {"sub" "owner-a"})]
(routes/handle-fetch request env))
body (.json response)]
(is (= 200 (.-status response)))
(is (true? (aget body "ok")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest delete-graph-owner-mismatch-is-forbidden
(async done
(let [env (permission-env
(fn [id url method]
(cond
(and (= id "index")
(= method "GET")
(= url "https://publish/pages/graph-1"))
(ok-json-response #js {"pages" #js [#js {"owner_sub" "owner-a" "page_uuid" "page-1"}]})
:else
(json-error-response 404 "not found"))))
request (js/Request. "https://publish.example/pages/graph-1"
#js {:method "DELETE"
:headers #js {"authorization" "Bearer token"}})]
(-> (p/let [response (p/with-redefs [authorization/verify-jwt (fn [_ _] #js {"sub" "owner-b"})]
(routes/handle-fetch request env))
body (.json response)]
(is (= 403 (.-status response)))
(is (= "forbidden" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest delete-graph-owner-match-succeeds
(async done
(let [env (permission-env
(fn [id url method]
(cond
(and (= id "index")
(= method "GET")
(= url "https://publish/pages/graph-1"))
(ok-json-response #js {"pages" #js [#js {"owner_sub" "owner-a" "page_uuid" "page-1"}
#js {"owner_sub" "owner-a" "page_uuid" "page-2"}]})
(and (= id "index")
(= method "DELETE")
(= url "https://publish/pages/graph-1"))
(ok-json-response #js {"ok" true})
(and (= method "DELETE")
(or (and (= id "graph-1:page-1") (= url "https://publish/pages/graph-1/page-1"))
(and (= id "graph-1:page-2") (= url "https://publish/pages/graph-1/page-2"))))
(ok-json-response #js {"ok" true})
:else
(json-error-response 404 "not found"))))
request (js/Request. "https://publish.example/pages/graph-1"
#js {:method "DELETE"
:headers #js {"authorization" "Bearer token"}})]
(-> (p/let [response (p/with-redefs [authorization/verify-jwt (fn [_ _] #js {"sub" "owner-a"})]
(routes/handle-fetch request env))
body (.json response)]
(is (= 200 (.-status response)))
(is (true? (aget body "ok")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))
(deftest get-page-requires-correct-password-when-protected
(async done
(let [env (permission-env
(fn [id url method]
(cond
(and (= id "graph-1:page-1")
(= method "GET")
(= url "https://publish/pages/graph-1/page-1"))
(ok-json-response #js {"content_hash" "h-1"
"r2_key" "publish/graph-1/page-1.transit"})
(and (= id "index")
(= method "GET")
(= url "https://publish/pages/graph-1/page-1/password"))
(ok-json-response #js {"password_hash" "pbkdf2$sha256$90000$x$y"})
:else
(json-error-response 404 "not found"))))
request (js/Request. "https://publish.example/pages/graph-1/page-1")]
(-> (p/let [response (p/with-redefs [publish-common/verify-password (fn [_ _] (p/resolved false))]
(routes/handle-fetch request env))
body (.json response)]
(is (= 401 (.-status response)))
(is (= "password required" (aget body "error")))
(done))
(p/catch (fn [error]
(is nil (str error))
(done)))))))

View File

@@ -1,5 +1,6 @@
(ns logseq.publish.test-runner
(:require [cljs.test :as ct]
[logseq.publish.common-test]
[logseq.publish.render-test]
[logseq.publish.routes-test]
[shadow.test :as st]

File diff suppressed because one or more lines are too long

View File

@@ -5,9 +5,6 @@
(def ^:private kondo-src-test-step
{:runner :cmd :name "clj-kondo (src test)" :cmd "clojure -M:clj-kondo --lint src test --cache false"})
(def ^:private kondo-src-step
{:runner :cmd :name "clj-kondo (src)" :cmd "clojure -M:clj-kondo --lint src --cache false"})
(def ^:private dep-plan
[{:dep "deps/common"
:steps [kondo-src-test-step
@@ -53,11 +50,11 @@
{:runner :bb :name "lint:minimize-public-vars" :cmd "lint:minimize-public-vars"}
{:runner :cmd :name "pnpm exec nbb-logseq (-e long)" :cmd "pnpm exec nbb-logseq -cp test -m nextjournal.test-runner -e long"}]}
{:dep "deps/publish"
:steps [kondo-src-step
:steps [kondo-src-test-step
{:runner :bb :name "lint:large-vars" :cmd "lint:large-vars"}
{:runner :bb :name "lint:carve" :cmd "lint:carve"}
{:runner :bb :name "lint:ns-docstrings" :cmd "lint:ns-docstrings"}
{:runner :skip :name "tests" :reason "no test script in deps/publish/package.json"}]}
{:runner :cmd :name "pnpm test" :cmd "pnpm test"}]}
{:dep "deps/publishing"
:steps [kondo-src-test-step
{:runner :bb :name "lint:large-vars" :cmd "lint:large-vars"}

View File

@@ -754,7 +754,9 @@
(last child)
(let [{:keys [content children]} (last child)
page-name (subs content 2 (- (count content) 2))]
(rum/with-key (page-reference (assoc config :children children) page-name nil) page-name))))
(rum/with-key (page-reference (assoc config :children children)
(or (:block/uuid page-entity) page-name)
nil) page-name))))
(cond
(and label
(string? label)
@@ -3018,7 +3020,7 @@
:disable-preview? true)]
(when (seq parents)
(let [parents-props (doall
(for [{:block/keys [uuid name title] :as block} parents]
(for [{:block/keys [uuid name] :as block} parents]
(if name
[block (page-cp (cond-> {:disable-preview? true}
disabled?
@@ -3027,7 +3029,7 @@
(let [result (block/parse-title-and-body
uuid
(get block :block/format :markdown)
title)
(:block/raw-title block))
ast-body (:block.temp/ast-body result)
ast-title (:block.temp/ast-title result)
config (assoc config :block/uuid uuid)]

View File

@@ -433,7 +433,7 @@
(let [format (util/evalue e)]
(when-not (string/blank? format)
(p/do!
(property-handler/set-block-property! :logseq.class/Journal
(property-handler/set-block-property! (:block/uuid (db/entity :logseq.class/Journal))
:logseq.property.journal/title-format
format)
(notification/show! (t :settings.general/refresh-required-feedback)))

View File

@@ -215,7 +215,7 @@
(defn backup-db-graph
[repo]
(when-not (util/capacitor?)
(when util/web-platform?
(web-backup-db-graph repo)))
(defonce *backup-interval (atom nil))

View File

@@ -25,6 +25,9 @@
(defn- ->block-id
[block-or-id]
(cond
(keyword? block-or-id)
(:block/uuid (db-utils/entity block-or-id))
(de/entity? block-or-id)
(:block/uuid block-or-id)

View File

@@ -21,7 +21,7 @@
(not (ldb/journal? entity))
(not (:logseq.property/built-in? entity))
(not (= :logseq.property/query (:db/ident (:logseq.property/created-from-property entity)))))))
(d/datom e a (str "debug " e) t)
(d/datom e a (str "debug " e " " (apply str (repeat (count v) "x"))) t)
:else
(d/datom e a v t))))))
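The anonymized debug value now embeds an x-run the same length as the original string, so size-dependent issues still reproduce without leaking content. For example, with e 42 and v "secret":
;; old: "debug 42"
;; new: "debug 42 xxxxxx"  ;; six x's mirror (count "secret")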

View File

@@ -594,8 +594,6 @@ DROP TRIGGER IF EXISTS blocks_au;
(when-not (string/blank? q)
(let [option (assoc option :enable-snippet? enable-snippet?)
match-input (get-match-input q)
page-count (count (d/datoms @conn :avet :block/name))
large-graph? (> page-count 2500)
non-match-input (when (<= (count q) 2)
(str "%" (string/replace q #"\s+" "%") "%"))
limit (or limit 100)
@@ -613,11 +611,9 @@ DROP TRIGGER IF EXISTS blocks_au;
(->> (search-blocks-aux search-db non-match-sql q non-match-input page limit-p)
(map (fn [result]
(assoc result :keyword-score (fuzzy/score q (:title result)))))))
;; fuzzy is too slow for large graphs
fuzzy-result (when-not (or page large-graph?)
(->> (fuzzy-search repo @conn q option)
(map (fn [result]
(assoc result :keyword-score (fuzzy/score q (:title result)))))))
fuzzy-result (->> (fuzzy-search repo @conn q option)
(map (fn [result]
(assoc result :keyword-score (fuzzy/score q (:title result))))))
;; _ (prn :debug "Search results before combine:" enable-snippet? (map :snippet matched-result))
;; _ (doseq [item (concat fuzzy-result matched-result)]
;; (prn :debug :keyword-search-result item))

View File

@@ -17,8 +17,11 @@
[logseq.db.sqlite.util :as sqlite-util]
[promesa.core :as p]))
(def upload-kvs-batch-size 2000)
(def upload-kvs-batch-size 500)
(def upload-prepare-datoms-batch-size 100000)
(def snapshot-upload-max-bytes 1000000)
(def snapshot-frame-header-bytes 4)
(def ignored-oversized-upload-attrs #{:logseq.property.tldraw/page})
(def snapshot-content-type "application/transit+json")
(def snapshot-content-encoding "gzip")
(def snapshot-text-encoder (js/TextEncoder.))
@@ -63,6 +66,66 @@
[rows]
(.encode snapshot-text-encoder (sqlite-util/write-transit-str rows)))
(defn- datom-value-byte-length
[value]
(.-byteLength ^js (.encode snapshot-text-encoder (sqlite-util/write-transit-str value))))
(defn- drop-oversized-upload-datoms
[datoms]
(let [threshold (- snapshot-upload-max-bytes snapshot-frame-header-bytes)]
(reduce (fn [{:keys [kept dropped]} datom]
(let [attr (:a datom)
size (when (contains? ignored-oversized-upload-attrs attr)
(datom-value-byte-length (:v datom)))]
(if (and size (> size threshold))
{:kept kept
:dropped (conj dropped {:a attr
:e (:e datom)
:bytes size})}
{:kept (conj kept datom)
:dropped dropped})))
{:kept []
:dropped []}
datoms)))
(defn- snapshot-rows-byte-length
[rows]
(+ snapshot-frame-header-bytes
(.-byteLength ^js (encode-snapshot-rows rows))))
(defn- max-prefix-rows-within-bytes
[rows max-bytes]
(let [rows-count (count rows)]
(loop [low 1
high rows-count
best 0]
(if (> low high)
best
(let [mid (quot (+ low high) 2)
rows' (subvec rows 0 mid)
size (snapshot-rows-byte-length rows')]
(if (<= size max-bytes)
(recur (inc mid) high mid)
(recur low (dec mid) best)))))))
(defn- split-snapshot-rows-by-max-bytes
[rows max-bytes]
(loop [remaining rows
batches []]
(if (empty? remaining)
batches
(let [prefix-count (max-prefix-rows-within-bytes remaining max-bytes)]
(if (pos? prefix-count)
(let [batch (subvec remaining 0 prefix-count)
remaining' (subvec remaining prefix-count)]
(recur remaining' (conj batches batch)))
(let [row (first remaining)
row-size (snapshot-rows-byte-length [row])]
(fail-fast :db-sync/snapshot-row-too-large
{:max-bytes max-bytes
:row-size row-size
:addr (first row)})))))))
(defn frame-bytes
[^js data]
(let [len (.-byteLength data)
@@ -98,6 +161,30 @@
{:body buf :encoding snapshot-content-encoding})
(p/resolved {:body frame :encoding nil}))))
(defn- snapshot-upload-url
[base graph-id reset? finished? checksum]
(str base "/sync/" graph-id "/snapshot/upload?reset="
(if reset? "true" "false")
"&finished="
(if finished? "true" "false")
(when finished?
(str "&checksum=" (js/encodeURIComponent checksum)))))
(defn- <upload-snapshot-rows-batches!
[rows-batches {:keys [base graph-id first-batch? finished? checksum auth-fetch-f]}]
(p/loop [remaining rows-batches
first-request? first-batch?]
(if-let [rows-batch (first remaining)]
(let [last-request? (nil? (next remaining))
finished-request? (and finished? last-request?)
upload-url (snapshot-upload-url base graph-id first-request? finished-request? checksum)]
(p/let [{:keys [body encoding]} (<snapshot-upload-body rows-batch)
headers (cond-> {"content-type" snapshot-content-type}
(string? encoding) (assoc "content-encoding" encoding))
_ (auth-fetch-f upload-url headers body)]
(p/recur (next remaining) false)))
nil)))
(defn set-graph-sync-metadata!
[repo graph-e2ee?]
(when-let [conn (worker-state/get-datascript-conn repo)]
@@ -126,9 +213,16 @@
(fn [batch]
(p/let [datoms* (sync-large-title/offload-large-titles-in-datoms-batch
repo graph-id batch aes-key sync-apply/upload-large-title!)
{:keys [kept dropped]} (drop-oversized-upload-datoms datoms*)
_ (when (seq dropped)
(prn :db-sync/drop-oversized-upload-datoms
{:repo repo
:count (count dropped)
:attrs (vec (distinct (map :a dropped)))
:max-bytes (apply max (map :bytes dropped))}))
encrypted-datoms (if aes-key
(sync-crypt/<encrypt-datoms aes-key datoms*)
datoms*)
(sync-crypt/<encrypt-datoms aes-key kept)
kept)
tx-data (mapv sync-large-title/datom->tx encrypted-datoms)]
(d/transact! (:conn temp) tx-data {:initial-db? true})
nil))
@@ -186,25 +280,38 @@
rows* (normalize-snapshot-rows rows)
loaded' (+ loaded (count rows*))
finished? (= loaded' total-rows)
upload-url (str base "/sync/" graph-id "/snapshot/upload?reset="
(if first-batch? "true" "false")
"&finished="
(if finished? "true" "false")
(when finished?
(str "&checksum=" (js/encodeURIComponent snapshot-checksum))))]
(p/let [{:keys [body encoding]} (<snapshot-upload-body rows*)
headers (cond-> {"content-type" snapshot-content-type}
(string? encoding) (assoc "content-encoding" encoding))
_ (sync-transport/fetch-json
(fn [opts]
(sync-auth/with-auth-headers
#(sync-auth/auth-headers (worker-state/get-id-token))
opts))
upload-url
{:method "POST"
:headers headers
:body body}
{:response-schema :sync/snapshot-upload})]
row-batches (split-snapshot-rows-by-max-bytes rows* snapshot-upload-max-bytes)
batch-payloads
(mapv (fn [rows-batch]
{:rows (count rows-batch)
:payload-bytes (snapshot-rows-byte-length rows-batch)})
row-batches)]
(prn :db-sync/upload-kvs-batch
{:total-kvs-rows total-rows
:fetched-kvs-rows (count rows*)
:upload-kvs-batch-size upload-kvs-batch-size
:split-batch-count (count row-batches)
:split-batches batch-payloads
:max-request-bytes snapshot-upload-max-bytes})
(p/let [_ (<upload-snapshot-rows-batches!
row-batches
{:base base
:graph-id graph-id
:first-batch? first-batch?
:finished? finished?
:checksum snapshot-checksum
:auth-fetch-f
(fn [upload-url headers body]
(sync-transport/fetch-json
(fn [opts]
(sync-auth/with-auth-headers
#(sync-auth/auth-headers (worker-state/get-id-token))
opts))
upload-url
{:method "POST"
:headers headers
:body body}
{:response-schema :sync/snapshot-upload}))})]
(update-progress {:sub-type :upload-progress
:message (str "Uploading " loaded' "/" total-rows)})
(p/recur max-addr false loaded'))))))
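Summarizing the upload changes above: oversized ignorable datoms are dropped up front, each fetched row batch is split into byte-capped sub-batches (the binary search in max-prefix-rows-within-bytes finds the largest prefix that still encodes under snapshot-upload-max-bytes), and the reset/finished flags are threaded so that only the first request resets server state and only the final one carries the checksum. A packing sketch with hypothetical encoded sizes:
;; rows encoding to ~600 KB, ~600 KB, ~100 KB under the 1,000,000-byte cap:
;;   batch 1: [row-a]        ;; row-a + row-b would exceed the cap
;;   batch 2: [row-b row-c]  ;; largest remaining prefix that fits
;; a single row that alone exceeds the cap fails fast with
;; :db-sync/snapshot-row-too-large rather than looping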

View File

@@ -1663,11 +1663,11 @@
:settings.sync-server/url "Sync Server URL"
:settings.sync-server/url-desc "Set a custom HTTPS sync server URL for self-hosted sync. Your Logseq authentication tokens will be sent to this server, so only use a trusted URL. Leave empty to use the official Logseq Sync."
:settings.sync-server/url-invalid-error "URL must start with https:// or http://"
:settings-page/publish-server-url "Publish server URL"
:settings-page/publish-server-url-desc "Set a custom HTTPS publish server URL for self-hosted single-page publishing. Your Logseq authentication tokens will be sent to this server, so only use a trusted URL. Leave empty to use the official Logseq publish service."
:settings-page/publish-server-url "Publish Server URL"
:settings-page/publish-server-url-desc "Set a custom HTTPS publish server URL for self-hosted single-page publishing. Your Logseq authentication tokens will be sent to this server, so only use a trusted URL. Leave empty to use the official Logseq Publish service."
:settings-page/publish-server-url-saved "Publish server URL saved."
:settings-page/publish-server-url-cleared "Publish server URL cleared. Using official Logseq publish."
:settings-page/publish-server-url-default "Logseq publish"
:settings-page/publish-server-url-cleared "Publish server URL cleared. Using official Logseq Publish."
:settings-page/publish-server-url-default "Logseq Publish"
:settings-page/publish-server-url-reset "Reset to default"
:shell/input-command-title "Input command"

View File

@@ -2,9 +2,12 @@
(:require [cljs.test :refer [deftest is testing]]
[datascript.core :as d]
[frontend.worker.pipeline :as worker-pipeline]
[logseq.common.util :as common-util]
[logseq.common.util.date-time :as date-time-util]
[logseq.db :as ldb]
[logseq.db.common.order :as db-order]
[logseq.db.test.helper :as db-test]))
[logseq.db.test.helper :as db-test]
[logseq.outliner.page :as outliner-page]))
(deftest test-built-in-page-updates-that-should-be-reverted
(let [conn (db-test/create-conn-with-blocks
@@ -145,6 +148,21 @@
(is (= "page1-renamed"
(:block/title (d/entity (:db-after result) (:db/id page1)))))))))
(deftest create-journal-page-name-uses-default-formatter-test
(let [conn (db-test/create-conn)]
(d/transact! conn [[:db/add :logseq.class/Journal :logseq.property.journal/title-format "yyyy-MM-dd EEEE"]])
(let [[_ page-uuid] (outliner-page/create! conn "Dec 16th, 2024" {})
page (d/entity @conn [:block/uuid page-uuid])
journal-day (:block/journal-day page)
expected-title (date-time-util/int->journal-title journal-day "yyyy-MM-dd EEEE")
expected-name (-> journal-day
(date-time-util/int->journal-title date-time-util/default-journal-title-formatter)
common-util/page-name-sanity-lc)]
(is (= expected-title (:block/title page))
"Journal title follows configured title format")
(is (= expected-name (:block/name page))
"Journal block/name keeps the default formatter for stable identity"))))
(deftest built-in-tag-must-not-convert-page-child-block-to-class-test
(let [conn (db-test/create-conn-with-blocks
{:pages-and-blocks [{:page {:block/title "page1"}}]})

View File

@@ -0,0 +1,93 @@
(ns frontend.worker.sync.upload-test
(:require [cljs.test :refer [async deftest is]]
[frontend.worker.sync.upload :as sync-upload]
[promesa.core :as p]
[clojure.string :as string]))
(deftest split-snapshot-rows-by-max-bytes-splits-rows-into-byte-capped-batches-test
(let [sizes {:a 4
:b 4
:c 4
:d 4}
rows [[:a] [:b] [:c] [:d]]]
(with-redefs [sync-upload/snapshot-rows-byte-length
(fn [rows']
(reduce + (map (fn [[addr]] (get sizes addr 0)) rows')))]
(is (= [[[:a] [:b]]
[[:c] [:d]]]
(#'sync-upload/split-snapshot-rows-by-max-bytes rows 8))))))
(deftest split-snapshot-rows-by-max-bytes-fails-fast-for-oversized-single-row-test
(let [sizes {:ok 3
:too-big 11}
rows [[:ok] [:too-big]]]
(with-redefs [sync-upload/snapshot-rows-byte-length
(fn [rows']
(reduce + (map (fn [[addr]] (get sizes addr 0)) rows')))]
(try
(#'sync-upload/split-snapshot-rows-by-max-bytes rows 10)
(is false "expected snapshot row too large error")
(catch :default error
(let [data (ex-data error)]
(is (= "snapshot-row-too-large" (ex-message error)))
(is (= 10 (:max-bytes data)))
(is (= 11 (:row-size data)))
(is (= :too-big (:addr data)))))))))
(deftest upload-snapshot-rows-batches-sets-reset-and-finished-flags-correctly-test
(async done
(let [calls* (atom [])
rows-batches [[[1 "a" nil]]
[[2 "b" nil]]
[[3 "c" nil]]]]
(-> (p/with-redefs [sync-upload/<snapshot-upload-body
(fn [rows]
(p/resolved {:body rows
:encoding nil}))]
(#'sync-upload/<upload-snapshot-rows-batches!
rows-batches
{:base "https://sync.example.test"
:graph-id "graph-1"
:first-batch? true
:finished? true
:checksum "abc+123="
:auth-fetch-f
(fn [url headers body]
(swap! calls* conj {:url url
:headers headers
:body body})
(p/resolved true))}))
(p/then
(fn [_]
(is (= 3 (count @calls*)))
(is (string/includes? (:url (nth @calls* 0)) "reset=true"))
(is (string/includes? (:url (nth @calls* 0)) "finished=false"))
(is (string/includes? (:url (nth @calls* 1)) "reset=false"))
(is (string/includes? (:url (nth @calls* 1)) "finished=false"))
(is (string/includes? (:url (nth @calls* 2)) "reset=false"))
(is (string/includes? (:url (nth @calls* 2)) "finished=true"))
(is (string/includes? (:url (nth @calls* 2)) "checksum=abc%2B123%3D"))
(done)))
(p/catch
(fn [error]
(is false (str error))
(done)))))))
(deftest drop-oversized-upload-datoms-drops-large-tldraw-page-values-test
(let [datoms [{:e 1 :a :block/title :v "safe"}
{:e 2 :a :logseq.property.tldraw/page :v {:id "small"}}
{:e 3 :a :logseq.property.tldraw/page :v {:id "huge"}}]]
(with-redefs [sync-upload/datom-value-byte-length
(fn [value]
(case (:id value)
"small" 32
"huge" 1500000
0))]
(let [{:keys [kept dropped]} (#'sync-upload/drop-oversized-upload-datoms datoms)]
(is (= 2 (count kept)))
(is (= [1 2] (mapv :e kept)))
(is (= 1 (count dropped)))
(is (= {:a :logseq.property.tldraw/page
:e 3
:bytes 1500000}
(first dropped)))))))