Merge branch 'master' into refactor/apis-types
@@ -30,11 +30,11 @@
(util/input graph-name)
(when enable-sync?
(w/wait-for "button#rtc-sync" {:timeout 3000})
(w/click "button#rtc-sync"))
(w/click "button:not([disabled]):text(\"Submit\")")
(when enable-sync?
(w/click "button#rtc-sync")
(input-e2ee-password)
(w/wait-for "button.cloud.on.idle" {:timeout 20000}))
(when-not enable-sync?
(w/click "button:not([disabled]):text(\"Submit\")"))
;; a new graph can block the UI because the db needs to be created and restored,
;; I have no idea why `search-and-click` failed to auto-wait sometimes.
(util/wait-timeout 1000))
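A minimal sketch of the wait-then-click pattern used above, assuming only the `w/wait-for` and `w/click` wrappers shown in this hunk (the helper name is hypothetical):

;; Wait for a selector to be ready (up to timeout-ms) before clicking it,
;; instead of relying on the click's auto-waiting.
(defn- click-when-ready!
  [selector timeout-ms]
  (w/wait-for selector {:timeout timeout-ms})
  (w/click selector))

;; e.g. (click-when-ready! "button#rtc-sync" 3000)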
deps/db/src/logseq/db/common/sqlite.cljs (vendored, 9 changed lines)
@@ -4,8 +4,8 @@
(:require ["path" :as node-path]
[clojure.string :as string]
[datascript.core :as d]
[logseq.db.sqlite.util :as sqlite-util]
[logseq.common.config :as common-config]))
[logseq.common.config :as common-config]
[logseq.db.sqlite.util :as sqlite-util]))

(defn create-kvs-table!
"Creates a sqlite table for use with datascript.storage if one doesn't exist"
@@ -40,3 +40,8 @@
(let [db-name' (sanitize-db-name db-name)
graph-dir (node-path/join graphs-dir db-name')]
[db-name' (node-path/join graph-dir "db.sqlite")]))

(defn get-db-backups-path
[graphs-dir db-name]
(let [db-name' (sanitize-db-name db-name)]
(node-path/join graphs-dir db-name' "backups")))
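Illustrative only: where the new backups directory lands, assuming `graphs-dir` is the graphs root; the exact directory name depends on `sanitize-db-name`.

;; (get-db-backups-path "/home/me/logseq/graphs" "my-graph")
;; => "/home/me/logseq/graphs/<sanitized my-graph>/backups"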
@@ -892,14 +892,15 @@
;; {:url ["Complex" {:protocol "zotero", :link "select/library/items/6VCW9QFJ"}], :label [["Plain" "Dechow and Struppa - 2015 - Intertwingled.pdf"]], :full_text "[Dechow and Struppa - 2015 - Intertwingled.pdf](zotero://select/library/items/6VCW9QFJ)", :metadata ""}
(defn- get-zotero-local-pdf-path
[config m]
(let [link (:link (second (:url m)))
label (second (first (:label m)))
id (last (string/split link #"/"))]
(when (and link id label)
(when-let [zotero-data-dir (get-in config [:zotero/settings-v2 "default" :zotero-data-directory])]
{:link (str "zotero://" link)
:path (node-path/join zotero-data-dir "storage" id label)
:base label}))))
(when (= "zotero" (:protocol (second (:url m))))
(let [link (:link (second (:url m)))
label (second (first (:label m)))
id (last (string/split link #"/"))]
(when (and link id label)
(when-let [zotero-data-dir (get-in config [:zotero/settings-v2 "default" :zotero-data-directory])]
{:link (str "zotero://" link)
:path (node-path/join zotero-data-dir "storage" id label)
:base label})))))
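Illustrative only: for the sample Zotero metadata in the comment above, and assuming a :zotero-data-directory of "/home/me/Zotero", the helper resolves to roughly:

;; {:link "zotero://select/library/items/6VCW9QFJ"
;;  :path "/home/me/Zotero/storage/6VCW9QFJ/Dechow and Struppa - 2015 - Intertwingled.pdf"
;;  :base "Dechow and Struppa - 2015 - Intertwingled.pdf"}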

(defn- walk-ast-blocks
"Walks each ast block in order to its full depth. Saves multiple ast types for
@@ -1048,10 +1049,11 @@
"Given an asset's relative or full path, create a unique name for identifying an asset.
Must handle paths such as ../assets/*, assets/* and with subdirectories"
[path]
(or (re-find #"assets/.*$" path)
;; pdf outside logseq graphs
(when (string/ends-with? path ".pdf")
path)))
(when (string? path)
(or (re-find #"assets/.*$" path)
;; pdf outside logseq graphs
(when (string/ends-with? path ".pdf")
path))))
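Illustrative only: the regex keeps everything from "assets/" onward, and the new (string? path) guard makes non-string paths (e.g. the Zotero map case) return nil instead of throwing.

;; (asset-path->name "../assets/2023_img.png") ;; => "assets/2023_img.png"
;; (asset-path->name "/tmp/paper.pdf")         ;; => "/tmp/paper.pdf" (pdf outside the graph)
;; (asset-path->name nil)                      ;; => nil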

(defn- update-asset-links-in-block-title [block-title asset-name-to-uuids ignored-assets]
(reduce (fn [acc [asset-name asset-uuid]]
@@ -1213,11 +1215,13 @@
(p/let [asset-maps* (p/all (map
(fn [asset-link]
(p/let [path* (-> asset-link second :url second)
zotero-asset? (when (map? path*)
(= "zotero" (:protocol (second (:url (second asset-link))))))
{:keys [path link base]} (if (map? path*)
(get-zotero-local-pdf-path user-config (second asset-link))
{:path path*})
asset-name (-> path asset-path->name)
asset-link-or-name (or link (-> path asset-path->name))
asset-name (some-> path asset-path->name)
asset-link-or-name (or link (some-> path asset-path->name))
asset-data* (when asset-link-or-name (get @assets asset-link-or-name))
_ (when (and asset-link-or-name
(not asset-data*)
@@ -1261,7 +1265,7 @@
(swap! assets assoc-in [asset-link-or-name :asset-created?] true)
{:asset-name-uuid [asset-link-or-name (:block/uuid new-asset)]
:asset-tx asset-tx}))
(do
(when-not zotero-asset? ; no need to report warning for zotero managed pdf files
(swap! ignored-assets conj
{:reason "No asset data found for this asset path"
:path (-> asset-link second :url second)
deps/outliner/src/logseq/outliner/core.cljs (vendored, 5 changed lines)
@@ -791,7 +791,7 @@
`update-timestamps?`: whether to update `blocks` timestamps.
``"
[repo db blocks target-block {:keys [_sibling? keep-uuid? keep-block-order?
outliner-op replace-empty-target? update-timestamps?
outliner-op outliner-real-op replace-empty-target? update-timestamps?
insert-template?]
:as opts
:or {update-timestamps? true}}]
@@ -810,7 +810,8 @@
b)
b)
dissoc-keys (concat [:block/tx-id]
(when (contains? #{:insert-template-blocks :paste} outliner-op)
(when (and (contains? #{:insert-template-blocks :paste} outliner-op)
(not (contains? #{:paste-text} outliner-real-op)))
[:block/refs]))]
(apply dissoc b' dissoc-keys))
b))
@@ -1,8 +1,8 @@
(ns electron.backup-file
(:require [clojure.string :as string]
(:require ["fs" :as fs]
["fs-extra" :as fs-extra]
["path" :as node-path]
["fs" :as fs]
["fs-extra" :as fs-extra]))
[clojure.string :as string]))

(def backup-dir "logseq/bak")
(def version-file-dir "logseq/version-files/local")
@@ -24,38 +24,142 @@
[repo relative-path]
(get-backup-dir* repo relative-path version-file-dir))

;; TODO: add interval support like days
(defn- truncate-old-versioned-files!
"reserve the latest 6 version files"
[dir]
(let [files (fs/readdirSync dir (clj->js {:withFileTypes true}))
files (mapv #(.-name %) files)
old-versioned-files (drop 6 (reverse (sort files)))]
"reserve the latest `keep-versions` version files"
[dir keep-versions]
(let [entries (fs/readdirSync dir (clj->js {:withFileTypes true}))
files (->> entries
(filter #(.-isFile %))
(mapv #(.-name %)))
old-versioned-files (drop keep-versions (reverse (sort files)))]
(doseq [file old-versioned-files]
(fs-extra/removeSync (node-path/join dir file)))))

(defn- parse-backup-ts
"Backup filenames are like: 2025-12-25T01_23_45.678Z.ext
We turn '_' back into ':' and parse as ISO."
[filename]
(let [base (-> filename
;; drop the extension (the last '.' part)
(string/replace #"\.[^.]+$" "")
(string/replace "_" ":"))
ms (.parse js/Date base)]
(when-not (js/isNaN ms) ms)))
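Illustrative only: a filename produced by backup-file below (an ISO timestamp with ':' replaced by '_', plus an extension) parses back to epoch milliseconds; anything else yields nil.

;; (parse-backup-ts "2025-12-25T01_23_45.678Z.sqlite") ;; => epoch ms for 2025-12-25T01:23:45.678Z
;; (parse-backup-ts "not-a-backup.txt")                ;; => nil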

(defn- truncate-daily-versioned-files!
"Keep the latest `keep-versions` version files, but:
- the newest 6 kept are deduped per-hour (keep newest file per hour)
- the remaining kept (if any) are deduped per-day (keep newest file per day)

Example: keep-versions=12 => 6 hourly + 6 daily."
[dir keep-versions]
(let [keep-versions (max 0 (or keep-versions 0))
keep-hourly (min 6 keep-versions)

;; list file names (ignore directories)
dirents (fs/readdirSync dir (clj->js {:withFileTypes true}))
files (->> dirents
(filter #(.-isFile %))
(mapv #(.-name %)))

;; sort newest -> oldest primarily by parsed timestamp; fall back to name
files* (->> files
(map (fn [n] {:name n :ts (or (parse-backup-ts n) -1)}))
(sort-by (juxt (comp - :ts) :name))
(mapv :name))

;; decide which files to keep
keep-set
(loop [xs files*
kept #{}
kept-count 0
hour-seen #{}
day-seen #{}]
(if (or (empty? xs) (>= kept-count keep-versions))
kept
(let [f (first xs)
ts (parse-backup-ts f)
;; derive keys; if unparsable, treat as unique bucket
hour-key (if ts
(.toISOString (js/Date. (-> ts
(js/Math.floor)
(- (mod ts 3600000)))))
(str "unparsable-hour:" f))
day-key (if ts
(.slice (.toISOString (js/Date. ts)) 0 10)
(str "unparsable-day:" f))]
(cond
;; Phase 1: hourly buckets (newest 6 hours)
(< (count hour-seen) keep-hourly)
(if (contains? hour-seen hour-key)
(recur (rest xs) kept kept-count hour-seen day-seen)
(recur (rest xs)
(conj kept f)
(inc kept-count)
(conj hour-seen hour-key)
day-seen))

;; Phase 2: daily buckets (fill remaining up to keep-versions)
:else
(if (contains? day-seen day-key)
(recur (rest xs) kept kept-count hour-seen day-seen)
(recur (rest xs)
(conj kept f)
(inc kept-count)
hour-seen
(conj day-seen day-key)))))))

;; remove everything not in keep-set
to-remove (remove keep-set files)]
(doseq [file to-remove]
(fs-extra/removeSync (node-path/join dir file)))))
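Illustrative only (the path is hypothetical): how the retention behaves when save-db! below calls this with keep-versions 12.

;; (truncate-daily-versioned-files! "/graphs/my-graph/backups" 12)
;; keeps the newest file in each of the six most recent hour buckets, then the
;; newest file per calendar day, stopping once 12 files are kept; every other
;; file in the directory is removed.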

(defn- latest-backup-info
"Return {:name .. :ts .. :size ..} for the latest backup in dir, or nil.
Prefers timestamp parsed from filename; falls back to file mtimeMs."
[dir]
(let [dirents (fs/readdirSync dir (clj->js {:withFileTypes true}))
files (->> dirents (filter #(.-isFile %)) (map #(.-name %)))]
(when (seq files)
(->> files
(map (fn [name]
(let [p (node-path/join dir name)
stat (fs/statSync p)
ts (or (parse-backup-ts name) (.-mtimeMs stat))]
{:name name
:ts ts
:size (.-size stat)})))
(apply max-key :ts)))))

(defn- too-soon?
[dir]
(let [info (latest-backup-info dir)
;; default: if using daily+hourly retention, don’t create more than 1 per hour
min-interval-ms 3600000
now-ms (.now js/Date)
latest-backup-ts (:ts info)]
(and latest-backup-ts
(pos? min-interval-ms)
(< (- now-ms latest-backup-ts) min-interval-ms))))

(defn backup-file
"backup CONTENT under DIR :backup-dir or :version-file-dir
:backup-dir = `backup-dir`
:version-file-dir = `version-file-dir`"
[repo dir relative-path ext content & {:keys [add-desktop? skip-backup-fn]
:or {add-desktop? true}}]
{:pre [(contains? #{:backup-dir :version-file-dir} dir)]}
(let [dir* (case dir
:backup-dir (get-backup-dir repo relative-path)
:version-file-dir (get-version-file-dir repo relative-path))
[repo dir relative-path ext content & {:keys [truncate-daily?
keep-versions backups-dir]
:or {keep-versions 6}}]
(let [dir* (or backups-dir
(case dir
:backup-dir (get-backup-dir repo relative-path)
:version-file-dir (get-version-file-dir repo relative-path)))
_ (fs-extra/ensureDirSync dir*)
backups (fs/readdirSync dir*)
latest-backup-size (when (seq backups)
(some->> (nth backups (dec (count backups)))
(node-path/join dir*)
(fs/statSync)
(.-size)))]
(when-not (and (fn? skip-backup-fn) latest-backup-size (skip-backup-fn latest-backup-size))
(let [new-path (node-path/join dir*
(str (string/replace (.toISOString (js/Date.)) ":" "_")
(when add-desktop? ".Desktop")
ext))]
(fs/writeFileSync new-path content)
(fs/statSync new-path)
(truncate-old-versioned-files! dir*)))))
new-path (node-path/join dir*
(str (string/replace (.toISOString (js/Date.)) ":" "_")
ext))]
(when-not (and truncate-daily? (too-soon? dir*))
(fs/writeFileSync new-path content)
(fs/statSync new-path)
(if truncate-daily?
(truncate-daily-versioned-files! dir* keep-versions)
(truncate-old-versioned-files! dir* keep-versions)))))
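A hedged usage sketch of the new signature (repo, content, and the paths are placeholders): the default flavour still writes a timestamped copy under the per-file version directory and keeps 6 versions, while callers such as save-db! below can point it at an explicit backups directory with hourly/daily retention.

;; (backup-file/backup-file repo :version-file-dir "pages/contents.md" ".md" content)
;; (backup-file/backup-file repo nil nil ".sqlite" content
;;                          {:backups-dir "/graphs/my-graph/backups"
;;                           :truncate-daily? true
;;                           :keep-versions 12})
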
@@ -2,6 +2,7 @@
"Provides SQLite dbs for electron and manages files of those dbs"
(:require ["fs-extra" :as fs]
["path" :as node-path]
[electron.backup-file :as backup-file]
[logseq.cli.common.graph :as cli-common-graph]
[logseq.common.config :as common-config]
[logseq.db.common.sqlite :as common-sqlite]))
@@ -17,11 +18,6 @@
(fs/ensureDirSync graph-dir)
graph-dir))

(defn save-db!
[db-name data]
(let [[_db-name db-path] (common-sqlite/get-db-full-path (cli-common-graph/get-db-graphs-dir) db-name)]
(fs/writeFileSync db-path data)))

(defn get-db
[db-name]
(let [_ (ensure-graph-dir! db-name)
@@ -29,6 +25,20 @@
(when (fs/existsSync db-path)
(fs/readFileSync db-path))))

(defn save-db!
[db-name data]
(let [[db-name db-path] (common-sqlite/get-db-full-path (cli-common-graph/get-db-graphs-dir) db-name)
old-data (get-db db-name)
backups-path (common-sqlite/get-db-backups-path (cli-common-graph/get-db-graphs-dir) db-name)]
(when old-data
(backup-file/backup-file db-name nil nil
".sqlite"
old-data
{:backups-dir backups-path
:truncate-daily? true
:keep-versions 12}))
(fs/writeFileSync db-path data)))

(defn unlink-graph!
[repo]
(let [db-name (common-sqlite/sanitize-db-name repo)
@@ -1,14 +1,14 @@
(ns electron.git
(:require ["dugite" :refer [GitProcess]]
[goog.object :as gobj]
["fs-extra" :as fs]
["os" :as os]
["path" :as node-path]
[clojure.string :as string]
[electron.logger :as logger]
[electron.state :as state]
[electron.utils :as utils]
[electron.logger :as logger]
[promesa.core :as p]
[clojure.string :as string]
["fs-extra" :as fs]
["path" :as node-path]
["os" :as os]))
[goog.object :as gobj]
[promesa.core :as p]))

(def log-error (partial logger/error "[Git]"))
@@ -111,27 +111,30 @@

(defn add-all-and-commit-single-graph!
[graph-path message]
(let [message (if (string/blank? message)
"Auto saved by Logseq"
message)]
(->
(p/let [_ (init! graph-path)
_ (add-all! graph-path)]
(commit! graph-path message))
(p/catch (fn [error]
(when (and
(string? error)
(not (string/blank? error)))
(if (string/starts-with? error "Author identity unknown")
(utils/send-to-renderer "setGitUsernameAndEmail" {:type "git"})
(utils/send-to-renderer "notification" {:type "error"
:payload (str error "\nIf you don't want to see those errors or don't need git, you can disable the \"Git auto commit\" feature on Settings > Version control.")}))))))))
;; Don't run git on db graphs
(when (string/includes? graph-path "logseq_local_")
(let [message (if (string/blank? message)
"Auto saved by Logseq"
message)]
(->
(p/let [_ (init! graph-path)
_ (add-all! graph-path)]
(commit! graph-path message))
(p/catch (fn [error]
(when (and
(string? error)
(not (string/blank? error)))
(if (string/starts-with? error "Author identity unknown")
(utils/send-to-renderer "setGitUsernameAndEmail" {:type "git"})
(utils/send-to-renderer "notification" {:type "error"
:payload (str error "\nIf you don't want to see those errors or don't need git, you can disable the \"Git auto commit\" feature on Settings > Version control.")})))))))))

(defn add-all-and-commit!
([]
(add-all-and-commit! nil))
([message]
(doseq [path (state/get-all-graph-paths)] (add-all-and-commit-single-graph! path message))))
(doseq [path (state/get-all-graph-paths)]
(add-all-and-commit-single-graph! path message))))

(defn short-status!
[graph-path]
@@ -255,18 +255,20 @@
[:code (if util/mac? "Cmd+Enter" "Ctrl+Enter")]
[:span " to display this tag inline instead of at the end of this node."]])])))

(rum/defc page-search < rum/reactive
{:will-unmount (fn [state]
(rum/defcs page-search < rum/reactive
{:init (fn [state]
(assoc state ::pos (state/get-editor-last-pos)))
:will-unmount (fn [state]
(reset! commands/*current-command nil)
state)}
"Page or tag searching popup"
[id format]
[state id format]
(let [action (state/sub :editor/action)
db? (config/db-based-graph? (state/get-current-repo))
embed? (and db? (= @commands/*current-command "Page embed"))
tag? (= action :page-search-hashtag)
db-tag? (and db? tag?)
pos (state/get-editor-last-pos)
pos (::pos state)
input (gdom/getElement id)]
(when input
(let [current-pos (cursor/pos input)
@@ -382,9 +384,11 @@
(:block/title template))
:class "black"})))

(rum/defc template-search < rum/reactive
[id _format]
(let [pos (state/get-editor-last-pos)
(rum/defcs template-search < rum/reactive
{:init (fn [state]
(assoc state ::pos (state/get-editor-last-pos)))}
[state id _format]
(let [pos (::pos state)
input (gdom/getElement id)]
(when input
(let [current-pos (cursor/pos input)
@@ -467,11 +471,13 @@
[last-pos current-pos])
[:<>])

(rum/defc code-block-mode-picker < rum/reactive
[id format]
(rum/defcs code-block-mode-picker < rum/reactive
{:init (fn [state]
(assoc state ::pos (state/get-editor-last-pos)))}
[state id format]
(when-let [modes (some->> js/window.CodeMirror (.-modes) (js/Object.keys) (js->clj) (remove #(= "null" %)))]
(when-let [^js input (gdom/getElement id)]
(let [pos (state/get-editor-last-pos)
(let [pos (::pos state)
current-pos (cursor/pos input)
edit-content (or (state/sub-edit-content) "")
q (or (editor-handler/get-selected-text)
@@ -63,14 +63,14 @@
{:variant :default
:on-click (fn []
(->
(p/let [result (export/backup-db-graph repo :set-folder)]
(p/let [result (export/backup-db-graph repo)]
(case result
true
(notification/show! "Backup successful!" :success)
:graph-not-changed
(notification/show! "Graph has not been updated since last export." :success)
nil)
(export/auto-db-backup! repo {:backup-now? false}))
(export/auto-db-backup! repo))
(p/catch (fn [error]
(println "Failed to backup.")
(js/console.error error)))))}
@@ -139,7 +139,9 @@
"Export debug transit file"]
[:p.text-sm.opacity-70.mb-0 "Exports to a .transit file to send to us for debugging. Any sensitive data will be removed in the exported file."]])

(when (and db-based? (not (util/mobile?)))
(when (and db-based?
util/web-platform?
(not (util/mobile?)))
[:div
[:hr]
(auto-backup)])]])))
@@ -363,7 +363,7 @@
bytes-array (js/Uint8Array. buffer)
checksum (db-asset/<get-file-array-buffer-checksum buffer)
asset-id (d/squuid)
asset-name (gp-exporter/asset-path->name (:path file))
asset-name (some-> (:path file) gp-exporter/asset-path->name)
assets-dir (path/path-join repo-dir common-config/local-assets-dir)
asset-type (db-asset/asset-path->type (:path file))
{:keys [with-edn-content pdf-annotation?]} (buffer-handler bytes-array)]
@@ -144,9 +144,23 @@
(not exact-match?)
(not (string/blank? @*input))
(not (exact-match-exclude-items @*input)))
(->>
(cons new-option search-result')
(remove nil?))
(let [current-input (exact-transform-fn @*input)
matches? (some (fn [item]
(and (string? item)
(string? current-input)
(string/includes?
(string/lower-case item)
(string/lower-case current-input))))
(set (map (comp exact-transform-fn str extract-fn) search-result')))]
(->>
(if matches?
(cons
(first search-result')
(cons
new-option
(rest search-result')))
(cons new-option search-result'))
(remove nil?)))
search-result')
input-opts' (if (fn? input-opts) (input-opts (empty? search-result)) input-opts)
input-container (or
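A minimal sketch of the reordering rule introduced above (names are illustrative, not the component's API): when the typed text already matches an existing result, the best match stays first and the "create new" option is slotted second; otherwise the "create new" option leads.

;; (place-new-option new-option results matches?)
(defn place-new-option
  [new-option results matches?]
  (->> (if matches?
         (cons (first results) (cons new-option (rest results)))
         (cons new-option results))
       (remove nil?)))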
@@ -801,7 +801,8 @@
(tooltip-row t enable-tooltip?))
(timetracking-row t enable-timetracking?)
(enable-all-pages-public-row t enable-all-pages-public?)
(auto-push-row t current-repo enable-git-auto-push?)]))
(when-not db-graph?
(auto-push-row t current-repo enable-git-auto-push?))]))

(rum/defc settings-git
[]
@@ -1505,7 +1506,7 @@

(when db-based?
[:ai (t :settings-page/tab-ai) (t :settings-page/ai) (ui/icon "wand")])
(when (util/electron?)
(when (and (util/electron?) (not db-based?))
[:version-control "git" (t :settings-page/tab-version-control) (ui/icon "history")])

;; (when (util/electron?)
@@ -188,14 +188,13 @@
(defn ensure-ref-block!
[pdf-current hl insert-opts]
(if (config/db-based-graph? (state/get-current-repo))
(p/chain
(db-based-ensure-ref-block! pdf-current hl insert-opts)
(fn []
;; try to move the asset block to the ref block
(let [ref-block (db-model/query-block-by-uuid (:id hl))
asset-block (:logseq.property.pdf/hl-image ref-block)]
(when asset-block
(editor-handler/move-blocks! [asset-block] ref-block {:sibling? false})))))
(p/let [ref-block (db-based-ensure-ref-block! pdf-current hl insert-opts)
asset-block (:logseq.property.pdf/hl-image ref-block)]
;; try to move the asset block to the ref block
(p/do!
(when asset-block
(editor-handler/move-blocks! [asset-block] ref-block {:sibling? false}))
ref-block))
(file-based-ensure-ref-block! pdf-current hl insert-opts)))

(defn file-based-load-hls-data$
@@ -2131,7 +2131,8 @@
keep-uuid?
revert-cut-txs
skip-empty-target?
ops-only?]
ops-only?
outliner-real-op]
:or {exclude-properties []}}]
(let [editing-block (when-let [editing-block (state/get-edit-block)]
(some-> (db/entity [:block/uuid (:block/uuid editing-block)])
@@ -2177,6 +2178,7 @@
blocks)]
(outliner-op/insert-blocks! blocks' target-block' {:sibling? sibling?
:outliner-op :paste
:outliner-real-op outliner-real-op
:replace-empty-target? replace-empty-target?
:keep-uuid? keep-uuid?}))))]
(if ops-only?
@@ -3851,10 +3853,11 @@

(defn toggle-open-block-children! [block-id]
(p/let [blocks (<all-blocks-with-level {:incremental? false
:collapse? true
:expanded? true
:root-block block-id})
all-expanded? (empty? blocks)]
(if all-expanded?
children-blocks (remove #(= block-id (:block/uuid %)) blocks)
any-expanded? (seq (filter (complement util/collapsed?) children-blocks))]
(if any-expanded?
(collapse-all! block-id {:collapse-self? false})
(expand-all! block-id))))
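Illustrative only (a pure-logic restatement, not the handler itself): the updated toggle collapses when any child block is still expanded and expands everything otherwise.

;; (toggle-direction children-blocks util/collapsed?) ;; => :collapse or :expand
(defn toggle-direction
  [children-blocks collapsed?]
  (if (seq (remove collapsed? children-blocks))
    :collapse
    :expand))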
@@ -113,19 +113,25 @@
(repo-handler/refresh-repos!))))

(defmethod handle :graph/switch [[_ graph opts]]
(export/cancel-db-backup!)
(persist-db/export-current-graph!)
(state/set-state! :db/async-queries {})
(st/refresh!)

(p/let [writes-finished? (state/<invoke-db-worker :thread-api/file-writes-finished? (state/get-current-repo))]
(if (not writes-finished?) ; TODO: test (:sync-graph/init? @state/state)
(do
(log/info :graph/switch {:file-writes-finished? writes-finished?})
(notification/show!
"Please wait seconds until all changes are saved for the current graph."
:warning))
(graph-switch-on-persisted graph opts))))
(let [switch-promise
(p/do!
(export/cancel-db-backup!)
(persist-db/export-current-graph!)
(state/set-state! :db/async-queries {})
(st/refresh!)
(if (config/db-based-graph?)
(graph-switch-on-persisted graph opts)
(p/let [writes-finished? (state/<invoke-db-worker :thread-api/file-writes-finished? (state/get-current-repo))]
(if (not writes-finished?) ; TODO: test (:sync-graph/init? @state/state)
(do
(log/info :graph/switch {:file-writes-finished? writes-finished?})
(notification/show!
"Please wait seconds until all changes are saved for the current graph."
:warning))
(graph-switch-on-persisted graph opts)))))]
(p/then switch-promise
(fn [_]
(export/backup-db-graph (state/get-current-repo))))))

(defmethod handle :graph/open-new-window [[_ev target-repo]]
(ui-handler/open-new-window-or-tab! target-repo))
@@ -178,14 +184,12 @@
(not util/nfs?))
(state/pub-event! [:graph/dir-gone dir]))))
(let [db-based? (config/db-based-graph? repo)]
(p/do!
(state/pub-event! [:graph/sync-context])
;; FIXME: an ugly implementation for redirecting to page on new window is restored
(repo-handler/graph-ready! repo)
(when-not config/publishing?
(if db-based?
(export/auto-db-backup! repo {:backup-now? true})
(fs-watcher/load-graph-files! repo))))))
;; FIXME: an ugly implementation for redirecting to page on new window is restored
(repo-handler/graph-ready! repo)

(when-not config/publishing?
(when-not db-based?
(fs-watcher/load-graph-files! repo)))))

(defmethod handle :instrument [[_ {:keys [type payload] :as opts}]]
(when-not (empty? (dissoc opts :type :payload))
@@ -196,8 +200,8 @@
(let [[user-uuid graph-uuid tx-id] @sync/graphs-txid
payload (merge
{:schema-version (str db-schema/version)
:db-schema-version (when-let [db (frontend.db/get-db)]
(str (:kv/value (frontend.db/entity db :logseq.kv/schema-version))))
:db-schema-version (when-let [db (db/get-db)]
(str (:kv/value (db/entity db :logseq.kv/schema-version))))
:user-id user-uuid
:graph-id graph-uuid
:tx-id tx-id
@@ -260,9 +264,11 @@

(defmethod handle :graph/restored [[_ graph]]
(when graph (assets-handler/ensure-assets-dir! graph))
(state/pub-event! [:graph/sync-context])
(export/auto-db-backup! graph)
(rtc-flows/trigger-rtc-start graph)
(fsrs/update-due-cards-count)
(when-not (mobile-util/native-ios?)
(when-not (mobile-util/native-platform?)
(state/pub-event! [:graph/ready graph])))

(defmethod handle :whiteboard-link [[_ shapes]]
@@ -264,8 +264,8 @@
(db/transact! [(ldb/kv :logseq.kv/graph-backup-folder folder-name)])
[folder-name handle]))

(defn backup-db-graph
[repo _backup-type]
(defn- web-backup-db-graph
[repo]
(when (and repo (= repo (state/get-current-repo)))
(when-let [backup-folder (ldb/get-key-value (db/get-db repo) :logseq.kv/graph-backup-folder)]
;; ensure file handle exists
@@ -310,6 +310,11 @@
(notification/show! "DB backup failed, please go to Export and specify a backup folder." :error)
false))))))

(defn backup-db-graph
[repo]
(when (and (config/db-based-graph? repo) (not (util/capacitor?)))
(web-backup-db-graph repo)))

(defonce *backup-interval (atom nil))
(defn cancel-db-backup!
[]
@@ -317,15 +322,15 @@
(js/clearInterval i)))

(defn auto-db-backup!
[repo {:keys [backup-now?]
:or {backup-now? true}}]
(when (ldb/get-key-value (db/get-db repo) :logseq.kv/graph-backup-folder)
(when (and (config/db-based-graph? repo) util/web-platform? (utils/nfsSupported))
(cancel-db-backup!)
[repo]
(when (and
(config/db-based-graph? repo)
util/web-platform?
(not (util/capacitor?))
(ldb/get-key-value (db/get-db repo) :logseq.kv/graph-backup-folder))
(cancel-db-backup!)

(when backup-now? (backup-db-graph repo :backup-now))

;; run backup every hour
(let [interval (js/setInterval #(backup-db-graph repo :auto)
(* 1 60 60 1000))]
(reset! *backup-interval interval)))))
;; run backup every hour
(let [interval (js/setInterval #(backup-db-graph repo)
(* 1 60 60 1000))]
(reset! *backup-interval interval))))
@@ -40,7 +40,8 @@
(update :block/title (fn [title]
(let [title' (db-content/replace-tags-with-id-refs title refs)]
(db-content/title-ref->id-ref title' refs)))))))))]
(editor-handler/paste-blocks blocks' {:keep-uuid? true}))))
(editor-handler/paste-blocks blocks' {:keep-uuid? true
:outliner-real-op :paste-text}))))

(defn- paste-segmented-text
[format text]
@@ -50,7 +50,7 @@
(let [tx (@*last-synced-graph->tx repo)
db (db/get-db repo)]
(or (nil? tx)
(> tx (:max-tx db)))))
(> (:max-tx db) tx))))

(defn export-current-graph!
[& {:keys [succ-notification? force-save?]}]
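A minimal sketch of the corrected staleness check above (the helper name is hypothetical): the graph needs exporting when it has never been synced, or when the db's newest transaction id is greater than the last synced one.

;; (needs-export? (@*last-synced-graph->tx repo) (db/get-db repo))
(defn needs-export?
  [last-synced-tx db]
  (or (nil? last-synced-tx)
      (> (:max-tx db) last-synced-tx)))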
@@ -1301,12 +1301,21 @@ Similar to re-frame subscriptions
[item]
(update-state! [:ui/navigation-item-collapsed? item] not))

(declare sidebar-add-block!)
(defn- sidebar-add-content-when-open!
[]
(when (empty? (:sidebar/blocks @state))
(sidebar-add-block! (get-current-repo) "contents" :contents)))

(defn toggle-sidebar-open?!
[]
(when-not (:ui/sidebar-open? @state)
(sidebar-add-content-when-open!))
(swap! state update :ui/sidebar-open? not))

(defn open-right-sidebar!
[]
(sidebar-add-content-when-open!)
(swap! state assoc :ui/sidebar-open? true))

(defn hide-right-sidebar!