Merge branch 'feat/db' into feat/capacitor-new

This commit is contained in:
charlie
2025-05-16 15:40:05 +08:00
26 changed files with 459 additions and 401 deletions

5
deps/common/nbb.edn vendored
View File

@@ -1,4 +1,5 @@
{:paths ["src" "resources"]
:deps
{io.github.nextjournal/nbb-test-runner
{:git/sha "60ed57aa04bca8d604f5ba6b28848bd887109347"}}}
;; TODO: Remove fork when https://github.com/nextjournal/nbb-test-runner/pull/2 is merged
{io.github.colin-p-hill/nbb-test-runner
{:git/sha "def2cbdb5b3a0e1612b28bf64f5d869c27c733d3"}}}

View File

@@ -1,5 +1,7 @@
{:meta/version 1
;; == FILE GRAPH CONFIG ==
;;
;; Set the preferred format.
;; This is _only_ for file graphs.
;; Available options:
@@ -42,6 +44,102 @@
;; Default value: "yyyy_MM_dd"
;; :journal/file-name-format "yyyy_MM_dd"
;; Set the default location for storing notes.
;; This is _only_ for file graphs.
;; Default value: "pages"
;; :pages-directory "pages"
;; Set the default location for storing journals.
;; This is _only_ for file graphs.
;; Default value: "journals"
;; :journals-directory "journals"
;; Set the default location for storing whiteboards.
;; This is _only_ for file graphs.
;; Default value: "whiteboards"
;; :whiteboards-directory "whiteboards"
;; Enabling this option converts
;; [[Grant Ideas]] to [[file:./grant_ideas.org][Grant Ideas]] for org-mode.
;; For more information, visit https://github.com/logseq/logseq/issues/672
;; This is _only_ for file graphs.
;; :org-mode/insert-file-link? false
;; Favorites to list on the left sidebar
;; This is _only_ for file graphs.
:favorites []
;; Set flashcards interval.
;; This is _only_ for file graphs.
;; Expected value:
;; - Float between 0 and 1
;; higher values result in faster changes to the next review interval.
;; Default value: 0.5
;; :srs/learning-fraction 0.5
;; Set the initial interval after the first successful review of a card.
;; This is _only_ for file graphs.
;; Default value: 4
;; :srs/initial-interval 4
;; Hide specific block properties.
;; This is _only_ for file graphs.
;; Example usage:
;; :block-hidden-properties #{:public :icon}
;; Create a page for all properties.
;; This is _only_ for file graphs.
;; Default value: true
:property-pages/enabled? true
;; Properties to exclude from having property pages
;; This is _only_ for file graphs.
;; Example usage:
;; :property-pages/excludelist #{:duration :author}
;; By default, property value separated by commas will not be treated as
;; page references. You can add properties to enable it.
;; This is _only_ for file graphs.
;; Example usage:
;; :property/separated-by-commas #{:alias :tags}
;; Properties that are ignored when parsing property values for references
;; This is _only_ for file graphs.
;; Example usage:
;; :ignored-page-references-keywords #{:author :website}
;; logbook configuration.
;; This is _only_ for file graphs.
;; :logbook/settings
;; {:with-second-support? false ;limit logbook to minutes, seconds will be eliminated
;; :enabled-in-all-blocks true ;display logbook in all blocks after timetracking
;; :enabled-in-timestamped-blocks false ;don't display logbook at all
;; }
;; File sync options
;; Ignore these files when syncing, regexp is supported.
;; This is _only_ for file graphs.
;; :file-sync/ignore-files []
;; Configure the escaping method for special characters in page titles.
;; This is _only_ for file graphs.
;; Warning:
;; This is a dangerous operation. To modify the setting,
;; you'll need to manually rename all affected files and
;; re-index them on all clients after synchronization.
;; Incorrect handling may result in messy page titles.
;; Available options:
;; - :triple-lowbar (default)
;; ;use triple underscore `___` for slash `/` in page title
;; ;use Percent-encoding for other invalid characters
:file/name-format :triple-lowbar
;; == END OF FILE GRAPH CONFIG ==
;; Hide empty block properties
;; This is _only_ for DB graphs.
;; Default value: false
;; :ui/hide-empty-properties? false
;; Enable tooltip preview on hover.
;; Default value: true
:ui/enable-tooltip? true
@@ -58,11 +156,6 @@
;; Default value: true
:ui/auto-expand-block-refs? true
;; Hide empty block properties
;; This is _only_ for DB graphs.
;; Default value: false
;; :ui/hide-empty-properties? false
;; Disable accent marks when searching.
;; After changing this setting, rebuild the search index by pressing (^C ^S).
;; Default value: true
@@ -137,27 +230,6 @@
;; 3. Set "home" as the home page and display multiple pages in the right sidebar:
;; :default-home {:page "home", :sidebar ["Page A" "Page B"]}
;; Set the default location for storing notes.
;; This is _only_ for file graphs.
;; Default value: "pages"
;; :pages-directory "pages"
;; Set the default location for storing journals.
;; This is _only_ for file graphs.
;; Default value: "journals"
;; :journals-directory "journals"
;; Set the default location for storing whiteboards.
;; This is _only_ for file graphs.
;; Default value: "whiteboards"
;; :whiteboards-directory "whiteboards"
;; Enabling this option converts
;; [[Grant Ideas]] to [[file:./grant_ideas.org][Grant Ideas]] for org-mode.
;; For more information, visit https://github.com/logseq/logseq/issues/672
;; This is _only_ for file graphs.
;; :org-mode/insert-file-link? false
;; Configure custom shortcuts.
;; Syntax:
;; 1. + indicates simultaneous key presses, e.g., `Ctrl+Shift+a`.
@@ -274,58 +346,6 @@
;; :charge-strength -600 ; Default value: -600
;; :charge-range 600} ; Default value: 600
;; Favorites to list on the left sidebar
;; This is _only_ for file graphs.
:favorites []
;; Set flashcards interval.
;; This is _only_ for file graphs.
;; Expected value:
;; - Float between 0 and 1
;; higher values result in faster changes to the next review interval.
;; Default value: 0.5
;; :srs/learning-fraction 0.5
;; Set the initial interval after the first successful review of a card.
;; This is _only_ for file graphs.
;; Default value: 4
;; :srs/initial-interval 4
;; Hide specific block properties.
;; This is _only_ for file graphs.
;; Example usage:
;; :block-hidden-properties #{:public :icon}
;; Create a page for all properties.
;; This is _only_ for file graphs.
;; Default value: true
:property-pages/enabled? true
;; Properties to exclude from having property pages
;; This is _only_ for file graphs.
;; Example usage:
;; :property-pages/excludelist #{:duration :author}
;; By default, property value separated by commas will not be treated as
;; page references. You can add properties to enable it.
;; This is _only_ for file graphs.
;; Example usage:
;; :property/separated-by-commas #{:alias :tags}
;; Properties that are ignored when parsing property values for references
;; This is _only_ for file graphs.
;; Example usage:
;; :ignored-page-references-keywords #{:author :website}
;; logbook configuration.
;; This is _only_ for file graphs.
;; :logbook/settings
;; {:with-second-support? false ;limit logbook to minutes, seconds will be eliminated
;; :enabled-in-all-blocks true ;display logbook in all blocks after timetracking
;; :enabled-in-timestamped-blocks false ;don't display logbook at all
;; }
;; Mobile photo upload configuration.
;; :mobile/photo
;; {:allow-editing? true
@@ -375,11 +395,6 @@
;; :redirect-page? false ;; Default value: false
;; :default-page "quick capture"} ;; Default page: "quick capture"
;; File sync options
;; Ignore these files when syncing, regexp is supported.
;; This is _only_ for file graphs.
;; :file-sync/ignore-files []
;; Configure the Enter key behavior for
;; context-aware editing with DWIM (Do What I Mean).
;; context-aware Enter key behavior implies that pressing Enter will
@@ -393,16 +408,4 @@
;; :page-ref? true ;; Default value: true
;; :properties? true ;; Default value: true
;; :list? false} ;; Default value: false
;; Configure the escaping method for special characters in page titles.
;; This is _only_ for file graphs.
;; Warning:
;; This is a dangerous operation. To modify the setting,
;; you'll need to manually rename all affected files and
;; re-index them on all clients after synchronization.
;; Incorrect handling may result in messy page titles.
;; Available options:
;; - :triple-lowbar (default)
;; ;use triple underscore `___` for slash `/` in page title
;; ;use Percent-encoding for other invalid characters
:file/name-format :triple-lowbar}
}

View File

@@ -1,5 +1,5 @@
(ns logseq.common.config
"Common config and constants that are shared between deps and app"
"Common config constants and fns that are shared between deps and app"
(:require [clojure.string :as string]
[goog.object :as gobj]))
@@ -115,3 +115,43 @@
"*"
"-")))
(defn create-config-for-db-graph
"Given a new config.edn file string, creates a config.edn for use with only DB graphs"
[config]
(string/replace config #"(?m)[\s]*;; == FILE GRAPH CONFIG ==(?:.|\n)*?;; == END OF FILE GRAPH CONFIG ==\n?" ""))
(def file-only-config
"File only config keys that are deprecated in DB graphs along with
descriptions for their deprecation."
(merge
(zipmap
[:file/name-format
:file-sync/ignore-files
:hidden
:ignored-page-references-keywords
:journal/file-name-format
:journal/page-title-format
:journals-directory
:logbook/settings
:org-mode/insert-file-link?
:pages-directory
:preferred-workflow
:property/separated-by-commas
:property-pages/excludelist
:srs/learning-fraction
:srs/initial-interval
:whiteboards-directory]
(repeat "is not used in DB graphs"))
{:preferred-format
"is not used in DB graphs as there is only markdown mode."
:property-pages/enabled?
"is not used in DB graphs as all properties have pages"
:block-hidden-properties
"is not used in DB graphs as hiding a property is done in its configuration"
:feature/enable-block-timestamps?
"is not used in DB graphs as it is always enabled"
:favorites
"is not stored in config for DB graphs"
:default-templates
"is replaced by #Template and the `Apply template to tags` property"}))

View File

@@ -0,0 +1,45 @@
(ns logseq.common.config-test
(:require [cljs.test :refer [deftest is]]
[clojure.string :as string]
[logseq.common.config :as common-config]
#?(:org.babashka/nbb [nbb.classpath :as cp])
["fs" :as fs]
["path" :as node-path]))
(deftest remove-hidden-files
(let [files ["pages/foo.md" "pages/bar.md"
"script/README.md" "script/config.edn"
"dev/README.md" "dev/config.edn"]]
(is (= ["pages/foo.md" "pages/bar.md"]
#_:clj-kondo/ignore ;; buggy unresolved var
(common-config/remove-hidden-files
files
{:hidden ["script" "/dev"]}
identity))
"Removes hidden relative files")
(is (= ["/pages/foo.md" "/pages/bar.md"]
(common-config/remove-hidden-files
(map #(str "/" %) files)
{:hidden ["script" "/dev"]}
identity))
"Removes hidden files if they start with '/'")))
(defn find-on-classpath [classpath rel-path]
(some (fn [dir]
(let [f (node-path/join dir rel-path)]
(when (fs/existsSync f) f)))
(string/split classpath #":")))
#?(:org.babashka/nbb
(deftest create-config-for-db-graph
(let [original-config (some-> (find-on-classpath (cp/get-classpath) "templates/config.edn") fs/readFileSync str)
_ (assert original-config "config.edn must not be blank")
migrated-config (common-config/create-config-for-db-graph original-config)
forbidden-kws-regex (re-pattern (str (string/join "|" (keys common-config/file-only-config))))]
;; (println migrated-config)
(is (not (string/includes? migrated-config "== FILE ONLY CONFIG"))
"No longer includes file config header")
(assert (re-find forbidden-kws-regex original-config) "File config keys present in original config")
(is (not (re-find forbidden-kws-regex migrated-config))
"File config keys no longer present in migrated config"))))

View File

@@ -1,21 +0,0 @@
(ns logseq.common.config-test
(:require [logseq.common.config :as common-config]
[cljs.test :refer [deftest is]]))
(deftest remove-hidden-files
(let [files ["pages/foo.md" "pages/bar.md"
"script/README.md" "script/config.edn"
"dev/README.md" "dev/config.edn"]]
(is (= ["pages/foo.md" "pages/bar.md"]
(common-config/remove-hidden-files
files
{:hidden ["script" "/dev"]}
identity))
"Removes hidden relative files")
(is (= ["/pages/foo.md" "/pages/bar.md"]
(common-config/remove-hidden-files
(map #(str "/" %) files)
{:hidden ["script" "/dev"]}
identity))
"Removes hidden files if they start with '/'")))

1
deps/db/deps.edn vendored
View File

@@ -12,6 +12,7 @@
logseq/common {:local/root "../common"}
logseq/clj-fractional-indexing {:git/url "https://github.com/logseq/clj-fractional-indexing"
:sha "7182b7878410f78536dc2b6df35ed32ef9cd6b61"}
borkdude/rewrite-edn {:mvn/version "0.4.9"}
metosin/malli {:mvn/version "0.16.1"}
medley/medley {:mvn/version "1.4.0"}}

2
deps/db/nbb.edn vendored
View File

@@ -4,6 +4,8 @@
{:local/root "../common"}
medley/medley {:mvn/version "1.4.0"}
metosin/malli {:mvn/version "0.16.1"}
;; Used by db scripts with outliner.cli
borkdude/rewrite-edn {:mvn/version "0.4.9"}
logseq/clj-fractional-indexing {:git/url "https://github.com/logseq/clj-fractional-indexing"
:sha "7182b7878410f78536dc2b6df35ed32ef9cd6b61"}
io.github.nextjournal/nbb-test-runner

View File

@@ -3,9 +3,9 @@
(:require [clojure.set :as set]
[clojure.string :as string]
[datascript.core :as d]
[logseq.db.common.entity-plus :as entity-plus]
[logseq.db.common.order :as db-order]
[logseq.db.frontend.class :as db-class]
[logseq.db.common.entity-plus :as entity-plus]
[logseq.db.frontend.entity-util :as entity-util]
[logseq.db.frontend.property :as db-property]
[logseq.db.frontend.property.type :as db-property-type]
@@ -482,14 +482,10 @@
:file-block
(:logseq.property.history/block d)
:property-history-block
(:block/closed-value-property d)
:closed-value-block
(and (:logseq.property/created-from-property d)
(:logseq.property/value d))
(and (:logseq.property/created-from-property d) (:logseq.property/value d))
:property-value-block
(:block/uuid d)
:block
(= (:db/ident d) :logseq.property/empty-placeholder)

View File

@@ -803,21 +803,29 @@
set)]
(set/difference ref-uuids known-uuids)))
(defn- remove-namespaced-keys
"Removes keys from this ns for maps passed to sqlite.build fns as they don't need to validate or use them"
[m]
(->> m
(remove (fn [[k _v]] (= "logseq.db.sqlite.export" (namespace k))))
(into {})))
(defn- ensure-export-is-valid
"Checks that export map is usable by sqlite.build including checking that
all referenced properties and classes are defined. Checks related to properties and
classes are disabled when :exclude-namespaces is set because those checks can't be done"
[export-map {:keys [graph-options]}]
(when-not (seq (:exclude-namespaces graph-options)) (sqlite-build/validate-options export-map))
(let [undefined-uuids (find-undefined-uuids export-map)
undefined (cond-> {}
(empty? (:exclude-namespaces graph-options))
(merge (find-undefined-classes-and-properties export-map))
(seq undefined-uuids)
(assoc :uuids undefined-uuids))]
(when (seq undefined)
(throw (ex-info (str "The following classes, uuids and properties are not defined: " (pr-str undefined))
undefined)))))
[export-map* {:keys [graph-options]}]
(let [export-map (remove-namespaced-keys export-map*)]
(when-not (seq (:exclude-namespaces graph-options)) (sqlite-build/validate-options export-map))
(let [undefined-uuids (find-undefined-uuids export-map)
undefined (cond-> {}
(empty? (:exclude-namespaces graph-options))
(merge (find-undefined-classes-and-properties export-map))
(seq undefined-uuids)
(assoc :uuids undefined-uuids))]
(when (seq undefined)
(throw (ex-info (str "The following classes, uuids and properties are not defined: " (pr-str undefined))
undefined))))))
(defn build-export
"Handles exporting db by given export-type"
@@ -838,30 +846,33 @@
(build-graph-export db (:graph-options options)))]
(if (get-in options [:graph-options :catch-validation-errors?])
(try
(ensure-export-is-valid (dissoc export-map ::block ::graph-files ::kv-values ::schema-version) options)
(ensure-export-is-valid export-map options)
(catch ExceptionInfo e
(println "Caught error:" e)))
(ensure-export-is-valid (dissoc export-map ::block ::graph-files ::kv-values ::schema-version) options))
(ensure-export-is-valid export-map options))
(assoc export-map ::export-type export-type)))
;; Import fns
;; ==========
(defn- add-uuid-to-page-if-exists
[db import-to-existing-page-uuids m]
(if-let [ent (some->> (:build/journal m)
(d/datoms db :avet :block/journal-day)
first
:e
(d/entity db))]
[db import-to-existing-page-uuids {:keys [existing-pages-keep-properties?]} m]
(if-let [ent (if (:build/journal m)
(some->> (:build/journal m)
(d/datoms db :avet :block/journal-day)
first
:e
(d/entity db))
;; TODO: For now only check page uniqueness by title. Could handle more uniqueness checks later
(some->> (:block/title m) (ldb/get-case-page db)))]
(do
(swap! import-to-existing-page-uuids assoc (:block/uuid m) (:block/uuid ent))
(assoc m :block/uuid (:block/uuid ent)))
;; TODO: For now only check page uniqueness by title. Could handle more uniqueness checks later
(if-let [ent (some->> (:block/title m) (ldb/get-case-page db))]
(do
(swap! import-to-existing-page-uuids assoc (:block/uuid m) (:block/uuid ent))
(assoc m :block/uuid (:block/uuid ent)))
m)))
(cond-> (assoc m :block/uuid (:block/uuid ent))
(and (:build/properties m) existing-pages-keep-properties?)
(update :build/properties (fn [props]
(->> props
(remove (fn [[k _v]] (get ent k)))
(into {}))))))
m))
(defn- update-existing-properties
"Updates existing properties by ident. Also check imported and existing properties have
@@ -884,7 +895,7 @@
(defn- check-for-existing-entities
"Checks export map for existing entities and adds :block/uuid to them if they exist in graph to import.
Also checks for property conflicts between existing properties and properties to be imported"
[db {:keys [pages-and-blocks classes properties] ::keys [export-type] :as export-map} property-conflicts]
[db {:keys [pages-and-blocks classes properties] ::keys [export-type import-options] :as export-map} property-conflicts]
(let [import-to-existing-page-uuids (atom {})
export-map
(cond-> {:build-existing-tx? true
@@ -892,7 +903,7 @@
(seq pages-and-blocks)
(assoc :pages-and-blocks
(mapv (fn [m]
(update m :page (partial add-uuid-to-page-if-exists db import-to-existing-page-uuids)))
(update m :page (partial add-uuid-to-page-if-exists db import-to-existing-page-uuids import-options)))
pages-and-blocks))
(seq classes)
(assoc :classes
@@ -915,7 +926,7 @@
(walk/postwalk (fn [f]
(if (and (vector? f) (= :build/page (first f)))
[:build/page
(add-uuid-to-page-if-exists db import-to-existing-page-uuids (second f))]
(add-uuid-to-page-if-exists db import-to-existing-page-uuids import-options (second f))]
f))
export-map))
;; Update uuid references of all pages that had their uuids updated to reference an existing page
@@ -948,6 +959,8 @@
* ::kv-values - Vec of :kv/value maps for a :graph export
* ::auto-include-namespaces - A set of parent namespaces to include from properties and classes
for a :graph export. See :exclude-namespaces in build-graph-export for a similar option
* ::import-options - A map of options that alters importing behavior. Has the following keys:
* :existing-pages-keep-properties? - Boolean which disables upsert of :build/properties on existing pages
This fn then returns a map of txs to transact with the following keys:
* :init-tx - Txs that must be transacted first, usually because they define new properties
@@ -969,7 +982,7 @@
{:error (str "The following imported properties conflict with the current graph: "
(pr-str (mapv :property-id @property-conflicts)))})
(if (= :graph (::export-type export-map''))
(-> (sqlite-build/build-blocks-tx (dissoc export-map'' ::graph-files ::kv-values ::export-type ::schema-version))
(-> (sqlite-build/build-blocks-tx (remove-namespaced-keys export-map''))
(assoc :misc-tx (vec (concat (::graph-files export-map'')
(::kv-values export-map'')))))
(sqlite-build/build-blocks-tx export-map'')))))
(sqlite-build/build-blocks-tx (remove-namespaced-keys export-map''))))))

View File

@@ -826,54 +826,65 @@
(-> (:classes imported-graph)
(medley/dissoc-in [:user.property/p1 :build/properties]))))))
(deftest build-import-can-import-existing-page-with-different-uuid
(defn- test-import-existing-page [import-options expected-page-properties]
(let [original-data
{:properties {:user.property/node {:logseq.property/type :node
:db/cardinality :db.cardinality/many}}
:pages-and-blocks
[{:page {:block/title "page1"
:build/properties {:user.property/node #{[:build/page {:block/title "node1"}]}}}}]}
:build/properties {:user.property/node
#{[:build/page {:block/title "existing page"
:build/properties {:logseq.property/description "first description"}}]}}}}]}
conn (db-test/create-conn-with-blocks original-data)
page-uuid (:block/uuid (db-test/find-page-by-title @conn "node1"))
page-uuid (:block/uuid (db-test/find-page-by-title @conn "existing page"))
_ (validate-db @conn)
;; This is just a temp uuid used to link to the page during import
temp-uuid (random-uuid)
existing-data
import-data
{:properties {:user.property/node {:logseq.property/type :node
:db/cardinality :db.cardinality/many}}
:pages-and-blocks
[{:page {:block/title "node1"
[{:page {:block/title "existing page"
:block/uuid temp-uuid
:build/keep-uuid? true}}
:build/keep-uuid? true
:build/properties {:logseq.property/description "second description"
:logseq.property/exclude-from-graph-view true}}}
{:page {:block/title "page2"
:build/properties {:user.property/node #{[:block/uuid temp-uuid]}}}}]}
:build/properties {:user.property/node #{[:block/uuid temp-uuid]}}}}]
::sqlite-export/import-options import-options}
{:keys [init-tx block-props-tx] :as _txs}
(sqlite-export/build-import existing-data @conn {})
(sqlite-export/build-import import-data @conn {})
;; _ (cljs.pprint/pprint _txs)
_ (d/transact! conn init-tx)
_ (d/transact! conn block-props-tx)
_ (validate-db @conn)
expected-pages-and-blocks
[{:page
{:block/uuid page-uuid
:build/keep-uuid? true,
:block/title "node1"},
:blocks []}
{:page
{:build/properties
{:user.property/node
#{[:block/uuid page-uuid]}},
:block/title "page1"},
:blocks []}
{:page
{:build/properties
{:user.property/node
#{[:block/uuid page-uuid]}},
:block/title "page2"},
:blocks []}],
[{:block/uuid page-uuid
:build/keep-uuid? true,
:block/title "existing page"
:build/properties
expected-page-properties}
{:build/properties
{:user.property/node
#{[:block/uuid page-uuid]}},
:block/title "page1"}
{:build/properties
{:user.property/node
#{[:block/uuid page-uuid]}},
:block/title "page2"}]
exported-graph (sqlite-export/build-export @conn {:export-type :graph
:graph-options {:exclude-built-in-pages? true}})]
(is (= expected-pages-and-blocks
(:pages-and-blocks exported-graph))
"page uuid ('node1') is preserved across imports even when its assigned a temporary
uuid to relate it to other nodes")))
(map :page (:pages-and-blocks exported-graph)))
"page uuid of 'existing page' is preserved across imports even when its assigned a temporary
uuid to relate it to other nodes")))
(deftest build-import-can-import-existing-page-with-different-uuid
(testing "By default any properties passed to an existing page are upserted"
(test-import-existing-page {}
{:logseq.property/description "second description"
:logseq.property/exclude-from-graph-view true}))
(testing "With ::existing-pages-keep-properties?, existing properties on existing pages are not overwritten by imported data"
(test-import-existing-page {:existing-pages-keep-properties? true}
{:logseq.property/description "first description"
:logseq.property/exclude-from-graph-view true})))

View File

@@ -764,6 +764,7 @@
{:block block'' :properties-tx properties-tx}))
(defn- pretty-print-dissoc
"Remove list of keys from a given map string while preserving whitespace"
[s dissoc-keys]
(-> (reduce rewrite/dissoc
(rewrite/parse-string s)
@@ -1496,7 +1497,11 @@
<save-file default-save-file}}]
(-> (<read-file config-file)
(p/then #(p/do!
(<save-file repo-or-conn "logseq/config.edn" %)
(<save-file repo-or-conn
"logseq/config.edn"
;; Converts a file graph config.edn for use with DB graphs. Unlike common-config/create-config-for-db-graph,
;; manually dissoc deprecated keys for config to be valid
(pretty-print-dissoc % (keys common-config/file-only-config)))
(let [config (edn/read-string %)]
(when-let [title-format (or (:journal/page-title-format config) (:date-formatter config))]
(ldb/transact! repo-or-conn [{:db/ident :logseq.class/Journal

View File

@@ -693,7 +693,7 @@
(deftest-async export-config-file-sets-title-format
(p/let [conn (db-test/create-conn)
read-file #(p/do! (pr-str {:journal/page-title-format "yyyy-MM-dd"}))
read-file #(p/do! "{:journal/page-title-format \"yyyy-MM-dd\"}")
_ (gp-exporter/export-config-file conn "logseq/config.edn" read-file {})]
(is (= "yyyy-MM-dd"
(:logseq.property.journal/title-format (d/entity @conn :logseq.class/Journal)))

View File

@@ -1,13 +1,15 @@
(ns ^:node-only logseq.outliner.cli
"Primary ns for outliner CLI fns"
(:require [clojure.string :as string]
(:require [borkdude.rewrite-edn :as rewrite]
[clojure.string :as string]
[datascript.core :as d]
[logseq.db.sqlite.create-graph :as sqlite-create-graph]
[logseq.db.sqlite.build :as sqlite-build]
[logseq.db.common.sqlite-cli :as sqlite-cli]
[logseq.outliner.db-pipeline :as db-pipeline]
["fs" :as fs]
["path" :as node-path]))
["path" :as node-path]
[logseq.common.config :as common-config]))
(defn- find-on-classpath [classpath rel-path]
(some (fn [dir]
@@ -15,6 +17,15 @@
(when (fs/existsSync f) f)))
(string/split classpath #":")))
(defn- pretty-print-merge
"Merge map into string while preserving whitespace"
[s m]
(-> (reduce (fn [acc [k v]]
(rewrite/assoc acc k v))
(rewrite/parse-string s)
m)
str))
(defn- setup-init-data
"Setup initial data same as frontend.handler.repo/create-db"
[conn {:keys [additional-config classpath import-type]
@@ -23,11 +34,10 @@
(cond-> (or (some-> (find-on-classpath classpath "templates/config.edn") fs/readFileSync str)
(do (println "Setting graph's config to empty since no templates/config.edn was found.")
"{}"))
true
(common-config/create-config-for-db-graph)
additional-config
;; TODO: Replace with rewrite-clj when it's available
(string/replace-first #"(:file/name-format :triple-lowbar)"
(str "$1 "
(string/replace-first (str additional-config) #"^\{(.*)\}$" "$1"))))]
(pretty-print-merge additional-config))]
(d/transact! conn (sqlite-create-graph/build-db-initial-data config-content {:import-type import-type}))))
(defn init-conn

View File

@@ -62,7 +62,6 @@
[frontend.mobile.util :as mobile-util]
[frontend.modules.outliner.tree :as tree]
[frontend.modules.shortcut.utils :as shortcut-utils]
[frontend.util.ref :as ref]
[frontend.security :as security]
[frontend.state :as state]
[frontend.template :as template]
@@ -70,6 +69,7 @@
[frontend.util :as util]
[frontend.util.file-based.clock :as clock]
[frontend.util.file-based.drawer :as drawer]
[frontend.util.ref :as ref]
[frontend.util.text :as text-util]
[goog.dom :as gdom]
[goog.functions :refer [debounce]]
@@ -939,13 +939,13 @@
"Component for a page. `page` argument contains :block/name which can be (un)sanitized page name.
Keys for `config`:
- `:preview?`: Is this component under preview mode? (If true, `page-preview-trigger` won't be registered to this `page-cp`)"
[state {:keys [label children preview? disable-preview? show-non-exists-page? table-view? tag? _skip-async-load?] :as config} page]
[state {:keys [label children preview? disable-preview? show-non-exists-page? tag? _skip-async-load?] :as config} page]
(when-let [entity' (rum/react (:*entity state))]
(let [entity (or (db/sub-block (:db/id entity')) entity')
config (assoc config :block entity)]
(cond
entity
(if (ldb/page? entity)
(if (or (ldb/page? entity) (not (:block/page entity)))
(let [page-name (some-> (:block/title entity) util/page-name-sanity-lc)
whiteboard-page? (model/whiteboard-page? entity)
inner (page-inner (assoc config :whiteboard-page? whiteboard-page?) entity children label)
@@ -962,10 +962,7 @@
(gp-mldoc/inline->edn label (mldoc/get-default-config :markdown))
label)))
(and (:block/name page) (util/uuid-string? (:block/name page)))
(invalid-node-ref (:block/name page))
(and (:block/name page) (or show-non-exists-page? table-view?))
(and (:block/name page) show-non-exists-page?)
(page-inner config (merge
{:block/title (:block/name page)
:block/name (:block/name page)}

View File

@@ -81,7 +81,9 @@
(string/replace input #"^#+" ""))
(defn create-items [q]
(when (and (not (string/blank? q)) (not config/publishing?))
(when (and (not (string/blank? q))
(not (#{"config.edn" "custom.js" "custom.css"} q))
(not config/publishing?))
(let [class? (string/starts-with? q "#")]
(->> [{:text (if class? "Create tag" "Create page") :icon "new-page"
:icon-theme :gray
@@ -236,6 +238,25 @@
(hash-map :status :success :items)
(swap! !results update group merge)))))
(defmethod load-results :recently-updated-pages [group state]
(let [!input (::input state)
!results (::results state)]
(swap! !results assoc-in [group :status] :loading)
(let [recent-pages (ldb/get-recent-updated-pages (db/get-db))
search-results (if (string/blank? @!input)
recent-pages
(search/fuzzy-search recent-pages @!input {:extract-fn :block/title}))]
(->> search-results
(map (fn [block]
(let [text (block-handler/block-unique-title block)
icon (get-page-icon block)]
{:icon icon
:icon-theme :gray
:text text
:source-block block})))
(hash-map :status :success :items)
(swap! !results update group merge)))))
(defn highlight-content-query
"Return hiccup of highlighted content FTS result"
[content q]
@@ -396,6 +417,7 @@
(load-results :nodes state)
(load-results :filters state)
(load-results :files state)
(load-results :recently-updated-pages state)
;; (load-results :recents state)
)))))

View File

@@ -370,8 +370,7 @@
;; config file options
:default-config config/config-default-content
:<save-config-file (fn save-config-file [_ path content]
(let [migrated-content (repo-handler/migrate-db-config content)]
(db-editor-handler/save-file! path migrated-content)))
(db-editor-handler/save-file! path content))
;; logseq file options
:<save-logseq-file (fn save-logseq-file [_ path content]
(db-editor-handler/save-file! path content))

View File

@@ -43,6 +43,7 @@
(views/view
{:view-parent page-entity
:view-feature-type :linked-references
:show-items-count? true
:additional-actions [reference-filter]
:columns (views/build-columns config [] {})
:config config})))

View File

@@ -7,7 +7,8 @@
[lambdaisland.glogi :as log]
[frontend.handler.notification :as notification]
[goog.string :as gstring]
[reitit.frontend.easy :as rfe]))
[reitit.frontend.easy :as rfe]
[logseq.common.config :as common-config]))
(defn- humanize-more
"Make error maps from me/humanize more readable for users. Doesn't try to handle
@@ -86,40 +87,6 @@ nested keys or positional errors e.g. tuples"
:else
(validate-config-map parsed-body schema path))))
(def file-only-config
"File only config that is deprecated in DB graphs"
(merge
(zipmap
[:file/name-format
:file-sync/ignore-files
:hidden
:ignored-page-references-keywords
:journal/file-name-format
:journal/page-title-format
:journals-directory
:logbook/settings
:org-mode/insert-file-link?
:pages-directory
:preferred-workflow
:property/separated-by-commas
:property-pages/excludelist
:srs/learning-fraction
:srs/initial-interval
:whiteboards-directory]
(repeat "is not used in DB graphs"))
{:preferred-format
"is not used in DB graphs as there is only markdown mode."
:property-pages/enabled?
"is not used in DB graphs as all properties have pages"
:block-hidden-properties
"is not used in DB graphs as hiding a property is done in its configuration"
:feature/enable-block-timestamps?
"is not used in DB graphs as it is always enabled"
:favorites
"is not stored in config for DB graphs"
:default-templates
"is replaced by #Template and the `Apply template to tags` property"}))
(defn detect-deprecations
"Detects config keys that will or have been deprecated"
[path content {:keys [db-graph?]}]
@@ -132,7 +99,7 @@ nested keys or positional errors e.g. tuples"
"is no longer supported."}
db-graph?
(merge
file-only-config))]
common-config/file-only-config))]
(cond
(= body ::failed-to-detect)
(log/info :msg "Skip deprecation check since config is not valid edn")

View File

@@ -50,6 +50,7 @@
[frontend.util.persist-var :as persist-var]
[goog.dom :as gdom]
[lambdaisland.glogi :as log]
[logseq.db.frontend.schema :as db-schema]
[promesa.core :as p]))
;; TODO: should we move all events here?
@@ -199,7 +200,10 @@
:user-id user-uuid
:graph-id graph-uuid
:tx-id tx-id
:db-based (config/db-based-graph? (state/get-current-repo)))]
:db-based (config/db-based-graph? (state/get-current-repo))
:schema-version db-schema/version
:db-schema-version (when-let [db (frontend.db/get-db)]
(:kv/value (frontend.db/entity db :logseq.kv/schema-version))))]
(Sentry/captureException error
(bean/->js {:tags payload}))))

View File

@@ -1,8 +1,7 @@
(ns frontend.handler.repo
"System-component-like ns that manages user's repos/graphs"
(:refer-clojure :exclude [clone])
(:require [borkdude.rewrite-edn :as rewrite]
[cljs-bean.core :as bean]
(:require [cljs-bean.core :as bean]
[clojure.string :as string]
[electron.ipc :as ipc]
[frontend.config :as config]
@@ -11,7 +10,6 @@
[frontend.db.persist :as db-persist]
[frontend.db.react :as react]
[frontend.db.restore :as db-restore]
[frontend.handler.common.config-edn :as config-edn-common-handler]
[frontend.handler.global-config :as global-config-handler]
[frontend.handler.graph :as graph-handler]
[frontend.handler.notification :as notification]
@@ -27,7 +25,8 @@
[frontend.util :as util]
[frontend.util.fs :as util-fs]
[frontend.util.text :as text-util]
[promesa.core :as p]))
[promesa.core :as p]
[logseq.common.config :as common-config]))
;; Project settings should be checked in two situations:
;; 1. User changes the config.edn directly in logseq.com (fn: alter-file)
@@ -177,16 +176,9 @@
(let [full-graph-name (string/lower-case (str config/db-version-prefix graph-name))]
(some #(= (some-> (:url %) string/lower-case) full-graph-name) (state/get-repos))))
(defn migrate-db-config
[content]
(-> (reduce rewrite/dissoc
(rewrite/parse-string (str content))
(keys config-edn-common-handler/file-only-config))
str))
(defn- create-db [full-graph-name {:keys [file-graph-import?]}]
(->
(p/let [config (migrate-db-config config/config-default-content)
(p/let [config (common-config/create-config-for-db-graph config/config-default-content)
_ (persist-db/<new full-graph-name
(cond-> {:config config}
file-graph-import? (assoc :import-type :file-graph)))

View File

@@ -16,14 +16,14 @@
version)
:environment (if config/dev? "development" "production")
:initialScope {:tags
(merge
(when (not-empty config/revision)
{:revision config/revision})
(cond->
{:platform (cond
(util/electron?) "electron"
(mobile-util/native-platform?) "mobile"
:else "web")
:publishing config/publishing?})}
:publishing config/publishing?}
(not-empty config/revision)
(assoc :revision config/revision))}
;; :integrations [(new posthog/SentryIntegration posthog "logseq" 5311485)
;; (new BrowserTracing)]
:debug config/dev?

View File

@@ -2376,7 +2376,6 @@ Similar to re-frame subscriptions"
(defn set-highlight-recent-days!
[days]
(prn :debug :set :days days)
(reset! (:ui/highlight-recent-days @state) days)
(storage/set :ui/highlight-recent-days days))

View File

@@ -16,6 +16,7 @@
[logseq.db.common.order :as db-order]
[logseq.db.frontend.class :as db-class]
[logseq.db.frontend.content :as db-content]
[logseq.db.frontend.malli-schema :as db-malli-schema]
[logseq.db.frontend.property :as db-property]
[logseq.db.frontend.property.build :as db-property-build]
[logseq.db.frontend.schema :as db-schema]
@@ -1172,8 +1173,11 @@
(js/console.error e)
(throw e)))))))
(defn- build-invalid-tx [entity eid]
(defn- build-invalid-tx [db entity eid]
(cond
(nil? (db-malli-schema/entity-dispatch-key db entity))
[[:db/retractEntity eid]]
(:block/schema entity)
[[:db/retract eid :block/schema]]
@@ -1200,7 +1204,8 @@
(= #{:block/tx-id} (set (keys entity)))
[[:db/retractEntity (:db/id entity)]]
(and (seq (:block/refs entity))
(and (or (seq (:block/refs entity))
(:logseq.property.table/filters entity))
(not (or (:block/title entity) (:block/content entity) (:property.value/content entity))))
[[:db/retractEntity (:db/id entity)]]
@@ -1285,7 +1290,7 @@
[:db/retract (:db/id entity) k]))))))
(into {} entity))
eid (:db/id entity)
fix (build-invalid-tx entity eid)]
fix (build-invalid-tx db entity eid)]
(into fix wrong-choice)))
invalid-entity-ids)
distinct)]

View File

@@ -32,7 +32,6 @@
[frontend.worker.util :as worker-util]
[goog.object :as gobj]
[lambdaisland.glogi.console :as glogi-console]
[logseq.common.log :as log]
[logseq.common.util :as common-util]
[logseq.db :as ldb]
[logseq.db.common.entity-plus :as entity-plus]
@@ -101,38 +100,39 @@
[^js pool data]
(.importDb ^js pool repo-path data))
(defn- get-all-datoms-from-sqlite-db
[db]
(some->> (.exec db #js {:sql "select * from kvs"
:rowMode "array"})
bean/->clj
(mapcat
(fn [[_addr content _addresses]]
(let [content' (sqlite-util/transit-read content)
datoms (when (map? content')
(:keys content'))]
datoms)))
distinct
(map (fn [[e a v t]]
(d/datom e a v t)))))
(comment
(defn- get-all-datoms-from-sqlite-db
[db]
(some->> (.exec db #js {:sql "select * from kvs"
:rowMode "array"})
bean/->clj
(mapcat
(fn [[_addr content _addresses]]
(let [content' (sqlite-util/transit-read content)
datoms (when (map? content')
(:keys content'))]
datoms)))
distinct
(map (fn [[e a v t]]
(d/datom e a v t)))))
(defn- rebuild-db-from-datoms!
"Persistent-sorted-set has been broken, used addresses can't be found"
[datascript-conn sqlite-db import-type]
(let [datoms (get-all-datoms-from-sqlite-db sqlite-db)
db (d/init-db [] db-schema/schema
{:storage (storage/storage @datascript-conn)})
db (d/db-with db
(map (fn [d]
[:db/add (:e d) (:a d) (:v d) (:t d)]) datoms))]
(prn :debug :rebuild-db-from-datoms :datoms-count (count datoms))
(defn- rebuild-db-from-datoms!
"Persistent-sorted-set has been broken, used addresses can't be found"
[datascript-conn sqlite-db import-type]
(let [datoms (get-all-datoms-from-sqlite-db sqlite-db)
db (d/init-db [] db-schema/schema
{:storage (storage/storage @datascript-conn)})
db (d/db-with db
(map (fn [d]
[:db/add (:e d) (:a d) (:v d) (:t d)]) datoms))]
(prn :debug :rebuild-db-from-datoms :datoms-count (count datoms))
;; export db first
(when-not import-type
(worker-util/post-message :notification ["The SQLite db will be exported to avoid any data-loss." :warning false])
(worker-util/post-message :export-current-db []))
(.exec sqlite-db #js {:sql "delete from kvs"})
(d/reset-conn! datascript-conn db)
(db-migrate/fix-db! datascript-conn)))
(when-not import-type
(worker-util/post-message :notification ["The SQLite db will be exported to avoid any data-loss." :warning false])
(worker-util/post-message :export-current-db []))
(.exec sqlite-db #js {:sql "delete from kvs"})
(d/reset-conn! datascript-conn db)
(db-migrate/fix-db! datascript-conn))))
(comment
(defn- gc-kvs-table!
@@ -158,78 +158,74 @@
:bind #js [addr]}))))))))
(defn- find-missing-addresses
[^Object db & {:keys [delete-addrs upsert-addr-content? open-db?]}]
(worker-util/profile
"find-missing-addresses"
(let [schema (some->> (.exec db #js {:sql "select content from kvs where addr = 0"
:rowMode "array"})
bean/->clj
ffirst
sqlite-util/transit-read)
result (->> (.exec db #js {:sql "select addr, addresses from kvs"
:rowMode "array"})
bean/->clj
(keep (fn [[addr addresses]]
(when-not (and delete-addrs (delete-addrs addr))
[addr (bean/->clj (js/JSON.parse addresses))]))))
used-addresses (-> (set (concat (mapcat second result)
[0 1 (:eavt schema) (:avet schema) (:aevt schema)]))
(clojure.set/difference delete-addrs))
missing-addresses (clojure.set/difference used-addresses (set (map first result)))]
(when (seq missing-addresses)
(prn :error :missing-addresses missing-addresses)
(if worker-util/dev?
(throw (ex-info "Found missing addresses that shouldn't happen" {:missing-addresses missing-addresses}))
(worker-util/post-message :capture-error
{:error "v2-db-missing-addresses"
:payload {:missing-addresses missing-addresses
:upsert-addr-content upsert-addr-content?
:open-db open-db?}})))
missing-addresses)))
[conn ^Object db & {:keys [delete-addrs]}]
(let [schema (some->> (.exec db #js {:sql "select content from kvs where addr = 0"
:rowMode "array"})
bean/->clj
ffirst
sqlite-util/transit-read)
result (->> (.exec db #js {:sql "select addr, addresses from kvs"
:rowMode "array"})
bean/->clj
(keep (fn [[addr addresses]]
(when-not (and delete-addrs (delete-addrs addr))
[addr (bean/->clj (js/JSON.parse addresses))]))))
used-addresses (-> (set (concat (mapcat second result)
[0 1 (:eavt schema) (:avet schema) (:aevt schema)]))
(clojure.set/difference delete-addrs))
missing-addresses (clojure.set/difference used-addresses (set (map first result)))]
(when (seq missing-addresses)
(let [version-in-db (when conn (db-schema/parse-schema-version (or (:kv/value (d/entity @conn :logseq.kv/schema-version)) 0)))
compare-result (when version-in-db (db-schema/compare-schema-version version-in-db "64.8"))]
(when (and compare-result (not (neg? compare-result))) ; >= 64.8
(worker-util/post-message :capture-error
{:error "db-missing-addresses-v2"
:payload {:missing-addresses missing-addresses}}))))
missing-addresses))
(defn upsert-addr-content!
"Upsert addr+data-seq. Update sqlite-cli/upsert-addr-content! when making changes"
[repo data delete-addrs* & {:keys [client-ops-db?] :or {client-ops-db? false}}]
(let [^Object db (worker-state/get-sqlite-conn repo (if client-ops-db? :client-ops :db))
delete-addrs (set delete-addrs*)]
[db data delete-addrs*]
(let [delete-addrs (clojure.set/difference (set delete-addrs*) #{0 1})]
(assert (some? db) "sqlite db not exists")
(.transaction db (fn [tx]
(doseq [item data]
(.exec tx #js {:sql "INSERT INTO kvs (addr, content, addresses) values ($addr, $content, $addresses) on conflict(addr) do update set content = $content, addresses = $addresses"
:bind item}))))
(when (seq delete-addrs)
(.transaction db (fn [tx]
(doseq [addr delete-addrs]
(.exec tx #js {:sql "Delete from kvs WHERE addr = ? AND NOT EXISTS (SELECT 1 FROM json_each(addresses) WHERE value = ?);"
:bind #js [addr]}))))
(let [missing-addrs (when worker-util/dev?
(seq (find-missing-addresses db {:delete-addrs delete-addrs
:upsert-addr-content? true})))
delete-addrs' (if missing-addrs
(remove (set missing-addrs) delete-addrs)
delete-addrs)]
(when (seq delete-addrs')
(.transaction db (fn [tx]
(doseq [addr delete-addrs']
(.exec tx #js {:sql "Delete from kvs WHERE addr = ? AND NOT EXISTS (SELECT 1 FROM json_each(addresses) WHERE value = ?);"
:bind #js [addr]})))))))))
(seq (find-missing-addresses nil db {:delete-addrs delete-addrs})))]
(if (seq missing-addrs)
(worker-util/post-message :notification [(str "Bug!! Missing addresses: " missing-addrs) :error false])
(when (seq delete-addrs)
(.transaction db (fn [tx]
(doseq [addr delete-addrs]
(.exec tx #js {:sql "Delete from kvs WHERE addr = ? AND NOT EXISTS (SELECT 1 FROM json_each(addresses) WHERE value = ?);"
:bind #js [addr]}))))))))))
(defn restore-data-from-addr
"Update sqlite-cli/restore-data-from-addr when making changes"
[repo addr & {:keys [client-ops-db?] :or {client-ops-db? false}}]
(let [^Object db (worker-state/get-sqlite-conn repo (if client-ops-db? :client-ops :db))]
(assert (some? db) "sqlite db not exists")
(when-let [result (-> (.exec db #js {:sql "select content, addresses from kvs where addr = ?"
:bind #js [addr]
:rowMode "array"})
first)]
(let [[content addresses] (bean/->clj result)
addresses (when addresses
(js/JSON.parse addresses))
data (sqlite-util/transit-read content)]
(if (and addresses (map? data))
(assoc data :addresses addresses)
data)))))
[db addr]
(assert (some? db) "sqlite db not exists")
(when-let [result (-> (.exec db #js {:sql "select content, addresses from kvs where addr = ?"
:bind #js [addr]
:rowMode "array"})
first)]
(let [[content addresses] (bean/->clj result)
addresses (when addresses
(js/JSON.parse addresses))
data (sqlite-util/transit-read content)]
(if (and addresses (map? data))
(assoc data :addresses addresses)
data))))
(defn new-sqlite-storage
"Update sqlite-cli/new-sqlite-storage when making changes"
[repo _opts]
[^Object db]
(reify IStorage
(-store [_ addr+data-seq delete-addrs]
(let [used-addrs (set (mapcat
@@ -249,36 +245,10 @@
:$content (sqlite-util/transit-write data')
:$addresses addresses}))
addr+data-seq)]
(upsert-addr-content! repo data delete-addrs)))
(upsert-addr-content! db data delete-addrs)))
(-restore [_ addr]
(restore-data-from-addr repo addr))))
(defn new-sqlite-client-ops-storage
[repo]
(reify IStorage
(-store [_ addr+data-seq delete-addrs]
(let [used-addrs (set (mapcat
(fn [[addr data]]
(cons addr
(when (map? data)
(:addresses data))))
addr+data-seq))
delete-addrs (remove used-addrs delete-addrs)
data (map
(fn [[addr data]]
(let [data' (if (map? data) (dissoc data :addresses) data)
addresses (when (map? data)
(when-let [addresses (:addresses data)]
(js/JSON.stringify (bean/->js addresses))))]
#js {:$addr addr
:$content (sqlite-util/transit-write data')
:$addresses addresses}))
addr+data-seq)]
(upsert-addr-content! repo data delete-addrs :client-ops-db? true)))
(-restore [_ addr]
(restore-data-from-addr repo addr :client-ops-db? true))))
(restore-data-from-addr db addr))))
(defn- close-db-aux!
[repo ^Object db ^Object search ^Object client-ops]
@@ -338,8 +308,9 @@
[repo {:keys [config import-type datoms]}]
(when-not (worker-state/get-sqlite-conn repo)
(p/let [[db search-db client-ops-db :as dbs] (get-dbs repo)
storage (new-sqlite-storage repo {})
client-ops-storage (when-not @*publishing? (new-sqlite-client-ops-storage repo))
storage (new-sqlite-storage db)
client-ops-storage (when-not @*publishing?
(new-sqlite-storage client-ops-db))
db-based? (sqlite-util/db-based-graph? repo)]
(swap! *sqlite-conns assoc repo {:db db
:search search-db
@@ -374,24 +345,14 @@
(when import-type {:import-type import-type}))]
(d/transact! conn initial-data {:initial-db? true})))
(try
;; TODO: remove this once we can ensure there's no bug for missing addresses
;; because it's slow for large graphs
(when-not import-type
(when-let [missing-addresses (seq (find-missing-addresses conn db))]
(worker-util/post-message :notification ["It seems that the DB has been broken, please export a backup and contact Logseq team for help." :error false])
(throw (ex-info "DB missing addresses" {:missing-addresses missing-addresses}))))
;; TODO: remove this once we can ensure there's no bug for missing addresses
;; because it's slow for large graphs
(when-not import-type
(when-let [missing-addresses (seq (find-missing-addresses db {:open-db? true}))]
(throw (ex-info "DB missing addresses" {:missing-addresses missing-addresses}))))
(db-migrate/migrate conn search-db)
;; TODO: Remove this once we can ensure there's no bug for missing addresses
(catch :default e
(log/error (str "DB migrate failed for " repo ", error:") e)
(if (= (.-message e) "DB missing addresses")
(do
(rebuild-db-from-datoms! conn db import-type)
(db-migrate/migrate conn search-db))
(throw e))))
(db-migrate/migrate conn search-db)
(db-listener/listen-db-changes! repo (get @*datascript-conns repo))))))

View File

@@ -107,9 +107,11 @@
(let [valid? (if (get-in tx-report [:tx-meta :reset-conn!])
true
(db-validate/validate-tx-report! tx-report (:validate-db-options context)))]
(when (and (get-in context [:validate-db-options :fail-invalid?]) (not valid?))
(shared-service/broadcast-to-clients! :notification
[["Invalid DB!"] :error]))))
(when-not valid?
(when (or (get-in context [:validate-db-options :fail-invalid?]) worker-util/dev?)
(shared-service/broadcast-to-clients! :notification
[["Invalid DB!"] :error]))
(throw (ex-info "Invalid data" {:graph repo})))))
;; Ensure :block/order is unique for any block that has :block/parent
(when (or (:dev? context) (exists? js/process))

View File

@@ -294,10 +294,13 @@
(.postMessage common-channel #js {:type "master-changed"
:master-client-id master-client-id
:serviceName service-name})
(p/do!
(on-become-master-handler service-name)
(<re-requests-in-flight-on-master! target)
(p/resolve! status-ready-deferred-p)))
(->
(p/do!
(on-become-master-handler service-name)
(<re-requests-in-flight-on-master! target))
(p/finally
(fn []
(p/resolve! status-ready-deferred-p)))))
(defn <create-service
"broadcast-data-types - For data matching these types,