chore: remove file-specific graph-parser code
Also removed file graph code for markdown export as a prerequisite
@@ -196,7 +196,6 @@
logseq.db.sqlite.export sqlite-export
logseq.db.sqlite.util sqlite-util
logseq.db.test.helper db-test
logseq.graph-parser graph-parser
logseq.graph-parser.block gp-block
logseq.graph-parser.mldoc gp-mldoc
logseq.graph-parser.property gp-property
deps/cli/src/logseq/cli/common/file.cljs | 51
@@ -4,11 +4,8 @@
  (:require [clojure.string :as string]
            [datascript.core :as d]
            [logseq.db :as ldb]
            [logseq.db.common.entity-plus :as entity-plus]
            [logseq.db.frontend.content :as db-content]
            [logseq.db.sqlite.create-graph :as sqlite-create-graph]
            [logseq.db.sqlite.util :as sqlite-util]
            [logseq.graph-parser.property :as gp-property]
            [logseq.outliner.tree :as otree]))

(defn- indented-block-content
@@ -16,31 +13,11 @@
  (let [lines (string/split-lines content)]
    (string/join (str "\n" spaces-tabs) lines)))

(defn- content-with-collapsed-state
  "Only accept nake content (without any indentation)"
  [repo format content collapsed?]
  (cond
    collapsed?
    (gp-property/insert-property repo format content :collapsed true)

    ;; Don't check properties. Collapsed is an internal state log as property in file, but not counted into properties
    (false? collapsed?)
    (gp-property/remove-property format :collapsed content)

    :else
    content))
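For orientation, a hedged sketch of what this helper produces for a markdown file graph; the exact strings follow gp-property's `::` syntax, so treat them as an assumption rather than documented output:

```clojure
;; Sketch only: collapsed? true writes the property, false strips it,
;; and nil (unknown) leaves the content untouched.
(content-with-collapsed-state repo :markdown "hello" true)
;; => "hello\ncollapsed:: true"
(content-with-collapsed-state repo :markdown "hello\ncollapsed:: true" false)
;; => "hello"
```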

(defn- ^:large-vars/cleanup-todo transform-content
  [repo db {:block/keys [collapsed? format pre-block? properties] :as b} level {:keys [heading-to-list?]} context {:keys [db-based?]}]
  (let [title (or (:block/raw-title b) (:block/title b))
        block-ref-not-saved? (and (not db-based?)
                                  (first (:block/_refs (d/entity db (:db/id b))))
                                  (not (string/includes? title (str (:block/uuid b)))))
        heading (:heading properties)
        title (if db-based?
  [db {:block/keys [format pre-block? properties] :as b} level {:keys [heading-to-list?]} context]
  (let [heading (:heading properties)
                ;; replace [[uuid]] with block's content
                (db-content/recur-replace-uuid-in-block-title (d/entity db (:db/id b)))
                title)
        title (db-content/recur-replace-uuid-in-block-title (d/entity db (:db/id b)))
        content (or title "")
        content (cond
                  pre-block?
@@ -69,25 +46,21 @@
                  (-> (string/replace content #"^\s?#+\s+" "")
                      (string/replace #"^\s?#+\s?$" ""))
                  content)
        content (if db-based? content (content-with-collapsed-state repo format content collapsed?))
        new-content (indented-block-content (string/trim content) spaces-tabs)
        sep (if (string/blank? new-content)
              ""
              " ")]
    (str prefix sep new-content)))]
    (if block-ref-not-saved?
      (gp-property/insert-property repo format content :id (str (:block/uuid b)))
      content)))
    content))

(defn- tree->file-content-aux
  [repo db tree {:keys [init-level link] :as opts} context]
  (let [db-based? (sqlite-util/db-based-graph? repo)
        block-contents (transient [])]
  (let [block-contents (transient [])]
    (loop [[f & r] tree level init-level]
      (if (nil? f)
        (->> block-contents persistent! flatten (remove nil?))
        (let [page? (nil? (:block/page f))
              content (if (and page? (not link)) nil (transform-content repo db f level opts context {:db-based? db-based?}))
              content (if (and page? (not link)) nil (transform-content db f level opts context))
              new-content
              (if-let [children (seq (:block/children f))]
                (cons content (tree->file-content-aux repo db children {:init-level (inc level)} context))
@@ -104,9 +77,7 @@
(defn- update-block-content
  [db item eid]
  ;; This may not be needed if this becomes a file-graph only context
  (if (entity-plus/db-based-graph? db)
    (db-content/update-block-content db item eid)
    item))
  (db-content/update-block-content db item eid))

(defn block->content
  "Converts a block including its children (recursively) to plain-text."
@@ -126,11 +97,9 @@
(defn get-all-page->content
  "Exports a graph's pages as tuples of page name and page content"
  [repo db options]
  (let [filter-fn (if (ldb/db-based-graph? db)
                    (fn [ent]
                      (or (not (:logseq.property/built-in? ent))
                          (contains? sqlite-create-graph/built-in-pages-names (:block/title ent))))
                    (constantly true))]
  (let [filter-fn (fn [ent]
                    (or (not (:logseq.property/built-in? ent))
                        (contains? sqlite-create-graph/built-in-pages-names (:block/title ent))))]
    (->> (d/datoms db :avet :block/name)
         (map #(d/entity db (:e %)))
         (filter filter-fn)

deps/graph-parser/.carve/config.edn | 5
@@ -1,7 +1,4 @@
{:paths ["src"]
 :api-namespaces [logseq.graph-parser.property
                  logseq.graph-parser.exporter
                  ;; Used in tests
                  logseq.graph-parser.test.docs-graph-helper
 :api-namespaces [logseq.graph-parser.exporter
                  logseq.graph-parser.schema.mldoc]
 :report {:format :ignore}}

deps/graph-parser/.carve/ignore | 14
@@ -1,9 +1,9 @@
;; For CLI
logseq.graph-parser.mldoc/ast-export-markdown
;; API
logseq.graph-parser.mldoc/link?
logseq.graph-parser.mldoc/block-with-title?
;; API
logseq.graph-parser/get-blocks-to-delete
logseq.graph-parser.mldoc/link?
;; API
logseq.graph-parser.text/get-file-basename
;; API
@@ -11,17 +11,9 @@ logseq.graph-parser.mldoc/mldoc-link?
;; public var
logseq.graph-parser.schema.mldoc/block-ast-coll-schema
;; API
logseq.graph-parser/import-file-to-db-graph
;; API
logseq.graph-parser.block/extract-plain
;; API
logseq.graph-parser.block/extract-refs-from-text
;; API
logseq.graph-parser.text/get-page-name
logseq.graph-parser.text/get-namespace-last-part
;; API
logseq.graph-parser.whiteboard/shape->block
;; API
logseq.graph-parser/parse-file
;; API
logseq.graph-parser/filter-files
logseq.graph-parser.test.docs-graph-helper/clone-docs-repo-if-not-exists

deps/graph-parser/.clj-kondo/config.edn | 1
@@ -9,7 +9,6 @@
 :consistent-alias
 {:aliases {clojure.string string
            datascript.core d
            logseq.graph-parser graph-parser
            logseq.graph-parser.text text
            logseq.graph-parser.block gp-block
            logseq.graph-parser.mldoc gp-mldoc

deps/graph-parser/README.md | 12
@@ -1,18 +1,16 @@
## Description

This library parses a logseq graph directory and returns it as a datascript
database connection. This library powers the Logseq app and also runs from the
commandline, _independent_ of the app. This is powerful as this can run anywhere
that a Node.js script has access to a Logseq graph e.g. on CI processes like
Github Actions. This library is compatible with ClojureScript and with
This library parses a file graph directory and returns it as a datascript
database connection. This library mainly exists to convert file graphs to DB graphs.
This library is compatible with ClojureScript and with
[nbb-logseq](https://github.com/logseq/nbb-logseq) to respectively provide
frontend and commandline functionality.

## API

This library is under the parent namespace `logseq.graph-parser`. This library
provides two main namespaces for parsing, `logseq.graph-parser`.
`logseq.graph-parser/parse-file` is the main fn for the frontend.
provides two main namespaces, `logseq.graph-parser.exporter` and
`logseq.graph-parser.extract`.

## Usage

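A minimal usage sketch for the extract namespace named above; the option keys and return shape are taken from how the removed `parse-file` fn calls `extract/extract`, while the path, content, block pattern, and `db` value are illustrative assumptions:

```clojure
(ns my.script ; hypothetical consumer namespace
  (:require [logseq.graph-parser.extract :as extract]))

;; Parse a single markdown file into pages/blocks data.
;; db stands for a datascript db of the graph being parsed (placeholder here).
(extract/extract "pages/foo.md"
                 "- first block\n- second block"
                 {:block-pattern "-" ; assumed markdown block pattern
                  :date-formatter "MMM do, yyyy"
                  :uri-encoded? false
                  :filename-format :legacy
                  :db db})
;; => {:pages [...] :blocks [...] :ast [...] :refs [...]}
```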
deps/graph-parser/src/logseq/graph_parser.cljs | 131
@@ -1,131 +0,0 @@
(ns logseq.graph-parser
  "For file graphs, provides main ns to parse graph from source files.
   Used by logseq app to parse graph and then save to the given database connection"
  (:require [clojure.set :as set]
            [clojure.string :as string]
            [datascript.core :as d]
            [logseq.common.config :as common-config]
            [logseq.common.util :as common-util]
            [logseq.db :as ldb]
            [logseq.db.file-based.schema :as file-schema]
            [logseq.graph-parser.extract :as extract]))

(defn- retract-blocks-tx
  [blocks retain-uuids]
  (mapcat (fn [{uuid' :block/uuid eid :db/id}]
            (if (and uuid' (contains? retain-uuids uuid'))
              (map (fn [attr] [:db.fn/retractAttribute eid attr]) file-schema/retract-attributes)
              (when eid [[:db.fn/retractEntity eid]])))
          blocks))

(defn- get-file-page
  "Copy of file-model/get-file-page. Too basic to couple to main app"
  [db file-path]
  (ffirst
   (d/q
    '[:find ?page
      :in $ ?path
      :where
      [?file :file/path ?path]
      [?page :block/file ?file]]
    db
    file-path)))

(defn get-blocks-to-delete
  "Returns the transactional operations to retract blocks belonging to the
   given page name and file path. This function is required when a file is being
   parsed from disk; before saving the parsed, blocks from the previous version
   of that file need to be retracted.

   The 'Page' parsed from the new file version is passed separately from the
   file-path, as the page name can be set via properties in the file, and thus
   can change between versions. If it has changed, existing blocks for both the
   old and new page name will be retracted.

   Blocks are by default fully cleared via retractEntity. However, a collection
   of block UUIDs to retain can be passed, and any blocks with matching uuids
   will instead have their attributes cleared individually via
   'retractAttribute'. This will preserve block references to the retained
   UUIDs."
  [db file-page file-path retain-uuid-blocks]
  (let [existing-file-page (get-file-page db file-path)
        pages-to-clear (distinct (filter some? [existing-file-page (:db/id file-page)]))
        blocks (mapcat (fn [page-id]
                         (:block/_page (d/entity db page-id)))
                       pages-to-clear)
        retain-uuids (set (keep :block/uuid retain-uuid-blocks))]
    (retract-blocks-tx (distinct blocks) retain-uuids)))
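A sketch of how this fn pairs with parse-file's `:delete-blocks-fn` hook (documented below); the argument order mirrors parse-file's `(delete-blocks-fn (first pages) file-path block-ids)` call, and `conn`, `file-path`, and `content` are placeholders:

```clojure
;; Retract the previous version of a file's blocks before re-saving it,
;; retaining any block uuids that may still be referenced elsewhere.
(graph-parser/parse-file conn file-path content
  {:delete-blocks-fn (fn [new-page file-path' block-uuids]
                       (graph-parser/get-blocks-to-delete
                        @conn new-page file-path' block-uuids))})
```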

(defn parse-file
  "Parse file and save parsed data to the given db. Main parse fn used by logseq app.
   Options available:

   * :delete-blocks-fn - Optional fn which is called with the new page, file and existing block uuids
     which may be referenced elsewhere. Used to delete the existing blocks before saving the new ones.
     Implemented in file-common-handler/validate-and-get-blocks-to-delete for IoC
   * :extract-options - Options map to pass to extract/extract"
  ([conn file-path content] (parse-file conn file-path content {}))
  ([conn file-path content {:keys [delete-blocks-fn extract-options ctime mtime]
                            :or {delete-blocks-fn (constantly [])}
                            :as options}]
   (let [format (common-util/get-format file-path)
         file-content [{:file/path file-path}]
         {:keys [tx ast]}
         (let [extract-options' (merge {:block-pattern (common-config/get-block-pattern format)
                                        :date-formatter "MMM do, yyyy"
                                        :uri-encoded? false
                                        :filename-format :legacy}
                                       extract-options
                                       {:db @conn})
               {:keys [pages blocks ast refs]
                :or {pages []
                     blocks []
                     ast []}}
               (cond (contains? common-config/mldoc-support-formats format)
                     (extract/extract file-path content extract-options')

                     (common-config/whiteboard? file-path)
                     (extract/extract-whiteboard-edn file-path content extract-options')

                     :else nil)
               block-ids (map (fn [block] {:block/uuid (:block/uuid block)}) blocks)
               delete-blocks (delete-blocks-fn (first pages) file-path block-ids)
               block-refs-ids (->> (mapcat :block/refs blocks)
                                   (filter (fn [ref] (and (vector? ref)
                                                          (= :block/uuid (first ref)))))
                                   (map (fn [ref] {:block/uuid (second ref)}))
                                   (seq))
               ;; To prevent "unique constraint" on datascript
               block-ids (set/union (set block-ids) (set block-refs-ids))
               pages (extract/with-ref-pages pages blocks)
               pages-index (map #(select-keys % [:block/name]) pages)]
           ;; does order matter?
           {:tx (concat file-content refs pages-index delete-blocks pages block-ids blocks)
            :ast ast})
         file-entity (d/entity @conn [:file/path file-path])
         tx (concat tx [(cond-> {:file/path file-path
                                 :file/content content}
                          (or ctime (nil? file-entity))
                          (assoc :file/created-at (or ctime (js/Date.)))
                          mtime
                          (assoc :file/last-modified-at mtime))])]
     (ldb/transact! conn tx (select-keys options [:new-graph? :from-disk?]))
     {:tx tx
      :ast ast})))
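For reference, a minimal call to this (now removed) fn; `conn` is a datascript connection for a file graph and the path/content are placeholders:

```clojure
(def result
  (graph-parser/parse-file conn
                           "pages/foo.md"
                           "- first block\n- second block"
                           {:new-graph? false
                            :extract-options {:date-formatter "MMM do, yyyy"}}))
;; result is {:tx [...] :ast [...]}; the tx has already been transacted into conn.
(count (:tx result))
```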

(defn filter-files
  "Filters files in preparation for parsing. Only includes files that are
   supported by parser"
  [files]
  (let [support-files (filter
                       (fn [file]
                         (let [format (common-util/get-format (:file/path file))]
                           (contains? (set/union #{:edn :css} common-config/mldoc-support-formats) format)))
                       files)
        support-files (sort-by :file/path support-files)
        {journals true non-journals false} (group-by (fn [file] (string/includes? (:file/path file) "journals/")) support-files)
        {built-in true others false} (group-by (fn [file]
                                                 (or (string/includes? (:file/path file) "contents.")
                                                     (string/includes? (:file/path file) ".edn")
                                                     (string/includes? (:file/path file) "custom.css"))) non-journals)]
    (concat (reverse journals) built-in others)))
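An illustrative call (paths made up) showing the ordering this fn produces, assuming `common-util/get-format` maps `.css` to `:css`: reversed journals first, then the contents/edn/css "built-in" files, then everything else:

```clojure
(graph-parser/filter-files
 [{:file/path "pages/contents.md"}
  {:file/path "journals/2024_01_01.md"}
  {:file/path "journals/2024_01_02.md"}
  {:file/path "pages/a.md"}
  {:file/path "logseq/custom.css"}])
;; => ({:file/path "journals/2024_01_02.md"} {:file/path "journals/2024_01_01.md"}
;;     {:file/path "logseq/custom.css"} {:file/path "pages/contents.md"}
;;     {:file/path "pages/a.md"})
```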

@@ -5,7 +5,6 @@
 [clojure.string :as string]
 [clojure.walk :as walk]
 [datascript.core :as d]
 [datascript.impl.entity :as de]
 [logseq.common.config :as common-config]
 [logseq.common.date :as common-date]
 [logseq.common.util :as common-util]
@@ -866,61 +865,4 @@
        [others parents' result'])))]
    (recur blocks parents result))))
        result' (map (fn [block] (assoc block :block/order (db-order/gen-key))) result)]
    (concat result' other-blocks)))

(defn extract-plain
  "Extract plain elements including page refs"
  [repo content]
  (let [ast (gp-mldoc/->edn repo content :markdown)
        *result (atom [])]
    (walk/prewalk
     (fn [f]
       (cond
         ;; tag
         (and (vector? f)
              (= "Tag" (first f)))
         nil

         ;; nested page ref
         (and (vector? f)
              (= "Nested_link" (first f)))
         (swap! *result conj (:content (second f)))

         ;; page ref
         (and (vector? f)
              (= "Link" (first f))
              (map? (second f))
              (vector? (:url (second f)))
              (= "Page_ref" (first (:url (second f)))))
         (swap! *result conj
                (:full_text (second f)))

         ;; plain
         (and (vector? f)
              (= "Plain" (first f)))
         (swap! *result conj (second f))

         :else
         f))
     ast)
    (-> (string/trim (apply str @*result))
        text/page-ref-un-brackets!)))
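The removed fn's behaviour, using inputs taken from the frontend.format.mldoc-test cases that this commit also deletes (alias `gp-block` per the consistent-alias config above; `repo` is the graph name):

```clojure
(gp-block/extract-plain repo "foo [[bar]] #book #[[nice test]]")
;; => "foo [[bar]]"
(gp-block/extract-plain repo "[[Foo [[Bar]]]]")
;; => "Foo [[Bar]]"
```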

(defn extract-refs-from-text
  [repo db text date-formatter]
  (when (string? text)
    (let [ast-refs (gp-mldoc/get-references text (gp-mldoc/get-default-config repo :markdown))
          page-refs (map #(get-page-reference % :markdown) ast-refs)
          block-refs (map get-block-reference ast-refs)
          refs' (->> (concat page-refs block-refs)
                     (remove string/blank?)
                     distinct)]
      (-> (map #(cond
                  (de/entity? %)
                  {:block/uuid (:block/uuid %)}
                  (common-util/uuid-string? %)
                  {:block/uuid (uuid %)}
                  :else
                  (page-name->map % db true date-formatter))
               refs')
          set))))
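A hedged sketch of the removed fn's output shape; only the block-uuid case is shown concretely, since page refs go through `page-name->map` and their exact map depends on the db:

```clojure
;; Returns a set of ref maps: {:block/uuid ...} for block refs / uuid strings,
;; plus page maps (via page-name->map) for page and tag refs.
(gp-block/extract-refs-from-text repo db
                                 "see [[Some Page]] and ((63b6a1f2-0000-4000-8000-000000000000))"
                                 "MMM do, yyyy")
;; => #{{:block/uuid #uuid "63b6a1f2-0000-4000-8000-000000000000"}
;;      {...page map for "Some Page"...}}
```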
    (concat result' other-blocks)))

@@ -212,27 +212,9 @@
|
||||
(or (contains? #{"Nested_link"} (first result'))
|
||||
(contains? #{"Page_ref" "Block_ref" "Complex"} (first (:url (second result')))))))))
|
||||
|
||||
(defn properties?
|
||||
[ast]
|
||||
(contains? #{"Properties" "Property_Drawer"} (ffirst ast)))
|
||||
|
||||
(defn block-with-title?
|
||||
[type]
|
||||
(contains? #{"Paragraph"
|
||||
"Raw_Html"
|
||||
"Hiccup"
|
||||
"Heading"} type))
|
||||
|
||||
(defn- has-title?
|
||||
[repo content format]
|
||||
(let [ast (->edn repo content format)]
|
||||
(block-with-title? (ffirst (map first ast)))))
|
||||
|
||||
(defn get-title&body
|
||||
"parses content and returns [title body]
|
||||
returns nil if no title"
|
||||
[repo content format]
|
||||
(let [lines (string/split-lines content)]
|
||||
(if (has-title? repo content format)
|
||||
[(first lines) (string/join "\n" (rest lines))]
|
||||
[nil (string/join "\n" lines)])))
|
||||
"Heading"} type))
|
||||
@@ -4,9 +4,7 @@
|
||||
[clojure.string :as string]
|
||||
[goog.string :as gstring]
|
||||
[goog.string.format]
|
||||
[logseq.common.util :as common-util]
|
||||
[logseq.common.util.page-ref :as page-ref]
|
||||
[logseq.graph-parser.mldoc :as gp-mldoc]))
|
||||
[logseq.common.util :as common-util]))
|
||||
|
||||
(def colons "Property delimiter for markdown mode" "::")
|
||||
(defn colons-org
|
||||
@@ -14,13 +12,6 @@
|
||||
[property]
|
||||
(str ":" property ":"))
|
||||
|
||||
(defn ->block-content
|
||||
"Creates a block content string from properties map"
|
||||
[properties]
|
||||
(->> properties
|
||||
(map #(str (name (key %)) (str colons " ") (val %)))
|
||||
(string/join "\n")))
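What the removed `->block-content` helper produced, per the `::` markdown delimiter defined above (keys and values are illustrative):

```clojure
(gp-property/->block-content {:title "Foo" :tags "bar, baz"})
;; => "title:: Foo\ntags:: bar, baz"
```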
|
||||
|
||||
(defn properties-ast?
|
||||
[block]
|
||||
(and
|
||||
@@ -40,11 +31,6 @@
|
||||
;; these properties are hidden from the user but a few like the editable ones
|
||||
;; are visible for the user to edit.
|
||||
|
||||
(def built-in-extended-properties (atom #{}))
|
||||
(defn register-built-in-properties
|
||||
[props]
|
||||
(reset! built-in-extended-properties (set/union @built-in-extended-properties props)))
|
||||
|
||||
(def editable-linkable-built-in-properties
|
||||
"Properties used by logseq that user can edit and that can have linkable property values"
|
||||
#{:alias :aliases :tags})
|
||||
@@ -61,16 +47,14 @@
|
||||
(defn hidden-built-in-properties
|
||||
"Properties used by logseq that user can't edit or see"
|
||||
[]
|
||||
(set/union
|
||||
#{:custom-id :background_color :created_at :last_modified_at ; backward compatibility only
|
||||
:id :background-color :heading :collapsed
|
||||
:created-at :updated-at :last-modified-at
|
||||
:query-table :query-properties :query-sort-by :query-sort-desc :ls-type
|
||||
:hl-type :hl-page :hl-stamp :hl-color :hl-value :logseq.macro-name :logseq.macro-arguments
|
||||
:logseq.order-list-type :logseq.tldraw.page :logseq.tldraw.shape
|
||||
#{:custom-id :background_color :created_at :last_modified_at ; backward compatibility only
|
||||
:id :background-color :heading :collapsed
|
||||
:created-at :updated-at :last-modified-at
|
||||
:query-table :query-properties :query-sort-by :query-sort-desc :ls-type
|
||||
:hl-type :hl-page :hl-stamp :hl-color :hl-value :logseq.macro-name :logseq.macro-arguments
|
||||
:logseq.order-list-type :logseq.tldraw.page :logseq.tldraw.shape
|
||||
; task markers
|
||||
:todo :doing :now :later :done}
|
||||
@built-in-extended-properties))
|
||||
:todo :doing :now :later :done})
|
||||
|
||||
(def built-in-property-types
|
||||
"Types for built-in properties. Built-in properties whose values are to be
|
||||
@@ -106,9 +90,6 @@
|
||||
[]
|
||||
(set/difference (set/union (hidden-built-in-properties)
|
||||
(editable-built-in-properties))
|
||||
;; Most of these need to be auto-parsed as integers so exclude
|
||||
;; them until we have ones that must be unparsed
|
||||
@built-in-extended-properties
|
||||
;; Refs need to be parsed
|
||||
editable-linkable-built-in-properties
|
||||
;; All these should be parsed by gp-text/parse-non-string-property-value
|
||||
@@ -119,7 +100,7 @@
|
||||
(defonce properties-end-pattern
|
||||
(re-pattern (gstring/format "%s[\t\r ]*\n|(%s\\s*$)" properties-end properties-end)))
|
||||
|
||||
(defn contains-properties?
|
||||
(defn- contains-properties?
|
||||
[content]
|
||||
(when content
|
||||
(and (string/includes? content properties-start)
|
||||
@@ -150,143 +131,12 @@
|
||||
content))
|
||||
content))
|
||||
|
||||
(defn- build-properties-str
|
||||
[format properties]
|
||||
(when (seq properties)
|
||||
(let [org? (= format :org)
|
||||
kv-format (if org? ":%s: %s" (str "%s" colons " %s"))
|
||||
full-format (if org? ":PROPERTIES:\n%s\n:END:" "%s\n")
|
||||
properties-content (->> (map (fn [[k v]] (gstring/format kv-format (name k) v)) properties)
|
||||
(string/join "\n"))]
|
||||
(gstring/format full-format properties-content))))
|
||||
|
||||
(defn simplified-property?
|
||||
[line]
|
||||
(boolean
|
||||
(and (string? line)
|
||||
(re-find (re-pattern (str "^\\s?[^ ]+" colons)) line))))
|
||||
|
||||
(defn- front-matter-property?
|
||||
[line]
|
||||
(boolean
|
||||
(and (string? line)
|
||||
(common-util/safe-re-find #"^\s*[^ ]+:" line))))
|
||||
|
||||
(defn- insert-property-not-org
|
||||
[key* value lines {:keys [front-matter? has-properties? title?]}]
|
||||
(let [exists? (atom false)
|
||||
sym (if front-matter? ": " (str colons " "))
|
||||
new-property-s (str key* sym value)
|
||||
property-f (if front-matter? front-matter-property? simplified-property?)
|
||||
groups (partition-by property-f lines)
|
||||
compose-lines (fn []
|
||||
(mapcat (fn [lines]
|
||||
(if (property-f (first lines))
|
||||
(let [lines (doall
|
||||
(mapv (fn [text]
|
||||
(let [[k v] (common-util/split-first sym text)]
|
||||
(if (and k v)
|
||||
(let [key-exists? (= k key*)
|
||||
_ (when key-exists? (reset! exists? true))
|
||||
v (if key-exists? value v)]
|
||||
(str k sym (string/trim v)))
|
||||
text)))
|
||||
lines))
|
||||
lines (if @exists? lines (conj lines new-property-s))]
|
||||
lines)
|
||||
lines))
|
||||
groups))
|
||||
lines (cond
|
||||
has-properties?
|
||||
(compose-lines)
|
||||
|
||||
title?
|
||||
(cons (first lines) (cons new-property-s (rest lines)))
|
||||
|
||||
:else
|
||||
(cons new-property-s lines))]
|
||||
(string/join "\n" lines)))
|
||||
|
||||
(defn insert-property
|
||||
"Only accept nake content (without any indentation)"
|
||||
([repo format content key value]
|
||||
(insert-property repo format content key value false))
|
||||
([repo format content key value front-matter?]
|
||||
(when (string? content)
|
||||
(let [ast (gp-mldoc/->edn repo content format)
|
||||
title? (gp-mldoc/block-with-title? (ffirst (map first ast)))
|
||||
has-properties? (or (and title?
|
||||
(or (gp-mldoc/properties? (second ast))
|
||||
(gp-mldoc/properties? (second
|
||||
(remove
|
||||
(fn [[x _]]
|
||||
(contains? #{"Hiccup" "Raw_Html"} (first x)))
|
||||
ast)))))
|
||||
(gp-mldoc/properties? (first ast)))
|
||||
lines (string/split-lines content)
|
||||
[title body] (gp-mldoc/get-title&body repo content format)
|
||||
scheduled (filter #(string/starts-with? % "SCHEDULED") lines)
|
||||
deadline (filter #(string/starts-with? % "DEADLINE") lines)
|
||||
body-without-timestamps (filter
|
||||
#(not (or (string/starts-with? % "SCHEDULED")
|
||||
(string/starts-with? % "DEADLINE")))
|
||||
(string/split-lines body))
|
||||
org? (= :org format)
|
||||
key (string/lower-case (name key))
|
||||
value (string/trim (str value))
|
||||
start-idx (.indexOf lines properties-start)
|
||||
end-idx (.indexOf lines properties-end)
|
||||
result (cond
|
||||
(and org? (not has-properties?))
|
||||
(let [properties (build-properties-str format {key value})]
|
||||
(if title
|
||||
(string/join "\n" (concat [title] scheduled deadline [properties] body-without-timestamps))
|
||||
(str properties "\n" content)))
|
||||
|
||||
(and has-properties? (>= start-idx 0) (> end-idx 0) (> end-idx start-idx))
|
||||
(let [exists? (atom false)
|
||||
before (subvec lines 0 start-idx)
|
||||
middle (doall
|
||||
(->> (subvec lines (inc start-idx) end-idx)
|
||||
(mapv (fn [text]
|
||||
(let [[k v] (common-util/split-first ":" (subs text 1))]
|
||||
(if (and k v)
|
||||
(let [key-exists? (= k key)
|
||||
_ (when key-exists? (reset! exists? true))
|
||||
v (if key-exists? value v)]
|
||||
(str ":" k ": " (string/trim v)))
|
||||
text))))))
|
||||
middle (if @exists? middle (conj middle (str ":" key ": " value)))
|
||||
after (subvec lines (inc end-idx))
|
||||
lines (concat before [properties-start] middle [properties-end] after)]
|
||||
(string/join "\n" lines))
|
||||
|
||||
(not org?)
|
||||
(insert-property-not-org key value lines {:has-properties? has-properties?
|
||||
:title? title?
|
||||
:front-matter? front-matter?})
|
||||
|
||||
:else
|
||||
content)]
|
||||
(string/trimr result)))))
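Behaviour of `insert-property` for markdown and org content, taken from the test cases further down in this diff (`repo` is the graph name):

```clojure
(gp-property/insert-property repo :markdown "hello\na:: b\nworld\n" "c" "d")
;; => "hello\na:: b\nc:: d\nworld"
(gp-property/insert-property repo :org "hello" "a" "b")
;; => "hello\n:PROPERTIES:\n:a: b\n:END:"
```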
|
||||
|
||||
(defn remove-property
|
||||
([format key content]
|
||||
(remove-property format key content true))
|
||||
([format key content first?]
|
||||
(when (not (string/blank? (name key)))
|
||||
(let [format (or format :markdown)
|
||||
key (string/lower-case (name key))
|
||||
remove-f (if first? common-util/remove-first remove)]
|
||||
(if (and (= format :org) (not (contains-properties? content)))
|
||||
content
|
||||
(let [lines (->> (string/split-lines content)
|
||||
(remove-f (fn [line]
|
||||
(let [s (string/triml (string/lower-case line))]
|
||||
(or (string/starts-with? s (str ":" key ":"))
|
||||
(string/starts-with? s (str key colons " ")))))))]
|
||||
(string/join "\n" lines)))))))
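And the inverse, a sketch of `remove-property` stripping a markdown property line (the first matching line, by default):

```clojure
(gp-property/remove-property :markdown :foo "hello\nfoo:: bar\nworld")
;; => "hello\nworld"
```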
|
||||
|
||||
(defn remove-properties
|
||||
[format content]
|
||||
(cond
|
||||
@@ -325,22 +175,4 @@
|
||||
(string/join "\n" lines))
|
||||
|
||||
:else
|
||||
content))
|
||||
|
||||
(defn insert-properties
|
||||
[repo format content kvs]
|
||||
(reduce
|
||||
(fn [content [k v]]
|
||||
(let [k (if (string? k)
|
||||
(keyword (-> (string/lower-case k)
|
||||
(string/replace " " "-")))
|
||||
k)
|
||||
v (if (coll? v)
|
||||
(some->>
|
||||
(seq v)
|
||||
(distinct)
|
||||
(map (fn [item] (page-ref/->page-ref (page-ref/page-ref-un-brackets! item))))
|
||||
(string/join ", "))
|
||||
(if (keyword? v) (name v) v))]
|
||||
(insert-property repo format content k v)))
|
||||
content kvs))
|
||||
content))
|
||||
@@ -1,11 +1,7 @@
|
||||
(ns logseq.graph-parser.test.docs-graph-helper
|
||||
"Helper fns for setting up and running tests against docs graph"
|
||||
(:require ["child_process" :as child-process]
|
||||
["fs" :as fs]
|
||||
[cljs.test :refer [is testing]]
|
||||
[clojure.string :as string]
|
||||
[datascript.core :as d]
|
||||
[logseq.common.config :as common-config]))
|
||||
["fs" :as fs]))
|
||||
|
||||
;; Helper fns for test setup
|
||||
;; =========================
|
||||
@@ -21,150 +17,4 @@
|
||||
[dir branch]
|
||||
(when-not (.existsSync fs dir)
|
||||
(sh ["git" "clone" "--depth" "1" "-b" branch "-c" "advice.detachedHead=false"
|
||||
"https://github.com/logseq/docs" dir] {})))
|
||||
|
||||
;; Fns for common test assertions
|
||||
;; ==============================
|
||||
(defn get-top-block-properties
|
||||
[db]
|
||||
(->> (d/q '[:find (pull ?b [*])
|
||||
:where
|
||||
[?b :block/properties]
|
||||
[(missing? $ ?b :block/name)]]
|
||||
db)
|
||||
(map first)
|
||||
(map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
|
||||
(apply merge-with +)
|
||||
(filter #(>= (val %) 5))
|
||||
(into {})))
|
||||
|
||||
(defn get-all-page-properties
|
||||
[db]
|
||||
(->> (d/q '[:find (pull ?b [*])
|
||||
:where
|
||||
[?b :block/properties]
|
||||
[?b :block/name]]
|
||||
db)
|
||||
(map first)
|
||||
(map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
|
||||
(apply merge-with +)
|
||||
(into {})))
|
||||
|
||||
(defn get-block-format-counts
|
||||
[db]
|
||||
(->> (d/q '[:find (pull ?b [*]) :where [?b :block/format]] db)
|
||||
(map first)
|
||||
(group-by :block/format)
|
||||
(map (fn [[k v]] [k (count v)]))
|
||||
(into {})))
|
||||
|
||||
(defn- get-journal-page-count [db]
|
||||
(->> (d/q '[:find (count ?b)
|
||||
:where
|
||||
[?b :block/journal-day]
|
||||
[?b :block/name]
|
||||
[?b :block/file]]
|
||||
db)
|
||||
ffirst))
|
||||
|
||||
(defn- get-counts-for-common-attributes [db]
|
||||
(->> [:block/scheduled :block/priority :block/deadline :block/collapsed?
|
||||
:block/repeated?]
|
||||
(map (fn [attr]
|
||||
[attr
|
||||
(ffirst (d/q [:find (list 'count '?b) :where ['?b attr]]
|
||||
db))]))
|
||||
(into {})))
|
||||
|
||||
(defn- query-assertions
|
||||
[db graph-dir files]
|
||||
(testing "Query based stats"
|
||||
(is (= (->> files
|
||||
;; logseq files aren't saved under :block/file
|
||||
(remove #(string/includes? % (str graph-dir "/" common-config/app-name "/")))
|
||||
;; edn files being listed in docs by parse-graph aren't graph files
|
||||
(remove #(and (not (common-config/whiteboard? %)) (string/ends-with? % ".edn")))
|
||||
set)
|
||||
(->> (d/q '[:find (pull ?b [* {:block/file [:file/path]}])
|
||||
:where [?b :block/name] [?b :block/file]]
|
||||
db)
|
||||
(map (comp #(get-in % [:block/file :file/path]) first))
|
||||
set))
|
||||
"Files on disk should equal ones in db")
|
||||
|
||||
(is (= (count (filter #(re-find #"journals/" %) files))
|
||||
(get-journal-page-count db))
|
||||
"Journal page count on disk equals count in db")
|
||||
|
||||
(is (= {"CANCELED" 2 "DONE" 6 "LATER" 4 "NOW" 5 "WAIT" 1 "IN-PROGRESS" 1 "CANCELLED" 1 "TODO" 19}
|
||||
(->> (d/q '[:find (pull ?b [*]) :where [?b :block/marker]]
|
||||
db)
|
||||
(map first)
|
||||
(group-by :block/marker)
|
||||
(map (fn [[k v]] [k (count v)]))
|
||||
(into {})))
|
||||
"Task marker counts")
|
||||
|
||||
(is (= {:markdown 7372 :org 500} (get-block-format-counts db))
|
||||
"Block format counts")
|
||||
|
||||
(is (= {:rangeincludes 13, :description 137, :updated-at 46, :tags 5, :logseq.order-list-type 16, :query-table 8,
|
||||
:logseq.macro-arguments 105, :parent 14, :logseq.tldraw.shape 79, :card-last-score 5, :card-repeats 5,
|
||||
:name 16, :card-next-schedule 5, :ls-type 79, :card-last-interval 5, :type
|
||||
166, :template 5, :domainincludes 7, :title 114, :alias 62, :supports 6, :id
|
||||
146, :url 30, :card-ease-factor 5, :logseq.macro-name 105, :created-at 46,
|
||||
:card-last-reviewed 5, :platforms 79, :initial-version 16, :heading 332}
|
||||
(get-top-block-properties db))
|
||||
"Counts for top block properties")
|
||||
|
||||
(is (= {:rangeincludes 13, :description 117, :tags 5, :unique 2, :meta 2, :parent 14,
|
||||
:ls-type 1, :type 147, :source 1, :domainincludes 7, :sameas 4, :title 113, :author 1,
|
||||
:alias 62, :logseq.tldraw.page 1, :supports 6, :url 30, :platforms 78,
|
||||
:initial-version 15, :full-title 1}
|
||||
(get-all-page-properties db))
|
||||
"Counts for all page properties")
|
||||
|
||||
(is (= {:block/scheduled 2
|
||||
:block/priority 4
|
||||
:block/deadline 1
|
||||
:block/collapsed? 90
|
||||
:block/repeated? 1}
|
||||
(get-counts-for-common-attributes db))
|
||||
"Counts for blocks with common block attributes")
|
||||
|
||||
(let [no-name (->> (d/q '[:find (pull ?n [*]) :where [?b :block/namespace ?n]] db)
|
||||
(filter (fn [x]
|
||||
(when-not (:block/title (first x))
|
||||
x))))
|
||||
all-namespaces (->> (d/q '[:find (pull ?n [*]) :where [?b :block/namespace ?n]] db)
|
||||
(map (comp :block/title first))
|
||||
set)]
|
||||
(is (= #{"term" "setting" "book" "templates" "page" "Community" "Tweet"
|
||||
"Whiteboard" "Whiteboard/Tool" "Whiteboard/Tool/Shape" "Whiteboard/Object" "Whiteboard/Action Bar"}
|
||||
all-namespaces)
|
||||
(str "Has correct namespaces: " no-name)))
|
||||
|
||||
(is (empty? (->> (d/q '[:find ?n :where [?b :block/name ?n]] db)
|
||||
(map first)
|
||||
(filter #(string/includes? % "___"))))
|
||||
"Block names don't have the slash/triple-lowbar delimiter")))
|
||||
|
||||
(defn docs-graph-assertions
|
||||
"These are common assertions that should pass in both graph-parser and main
|
||||
logseq app. It is important to run these in both contexts to ensure that the
|
||||
functionality in frontend.handler.repo and logseq.graph-parser remain the
|
||||
same"
|
||||
[db graph-dir files]
|
||||
;; Counts assertions help check for no major regressions. These counts should
|
||||
;; only increase over time as the docs graph rarely has deletions
|
||||
(testing "Counts"
|
||||
(is (= 339 (count files)) "Correct file count")
|
||||
(is (= 33
|
||||
(ffirst
|
||||
(d/q '[:find (count ?b)
|
||||
:where [?b :block/title ?content]
|
||||
[(clojure.string/includes? ?content "+BEGIN_QUERY")]]
|
||||
db)))
|
||||
"Advanced query count"))
|
||||
|
||||
(query-assertions db graph-dir files))
|
||||
"https://github.com/logseq/docs" dir] {})))
|
||||
@@ -27,86 +27,6 @@
|
||||
"hello\n:PROPERTIES:\n:foo: bar\n:nice\n:END:\nnice"
|
||||
"hello\nfoo:: bar\n:nice\nnice"))
|
||||
|
||||
(deftest test-insert-property
|
||||
(are [x y] (= x y)
|
||||
(gp-property/insert-property test-db :org "hello" "a" "b")
|
||||
"hello\n:PROPERTIES:\n:a: b\n:END:"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello" "a" false)
|
||||
"hello\n:PROPERTIES:\n:a: false\n:END:"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello\n:PROPERTIES:\n:a: b\n:END:\n" "c" "d")
|
||||
"hello\n:PROPERTIES:\n:a: b\n:c: d\n:END:"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello\n:PROPERTIES:\n:a: b\n:END:\nworld\n" "c" "d")
|
||||
"hello\n:PROPERTIES:\n:a: b\n:c: d\n:END:\nworld"
|
||||
|
||||
(gp-property/insert-property test-db :org "#+BEGIN_QUOTE
|
||||
hello world
|
||||
#+END_QUOTE" "c" "d")
|
||||
":PROPERTIES:\n:c: d\n:END:\n#+BEGIN_QUOTE\n hello world\n #+END_QUOTE"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello
|
||||
DEADLINE: <2021-10-25 Mon>
|
||||
SCHEDULED: <2021-10-25 Mon>" "a" "b")
|
||||
"hello\nSCHEDULED: <2021-10-25 Mon>\nDEADLINE: <2021-10-25 Mon>\n:PROPERTIES:\n:a: b\n:END:"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello
|
||||
DEADLINE: <2021-10-25 Mon>
|
||||
SCHEDULED: <2021-10-25 Mon>\n:PROPERTIES:\n:a: b\n:END:\n" "c" "d")
|
||||
"hello\nDEADLINE: <2021-10-25 Mon>\nSCHEDULED: <2021-10-25 Mon>\n:PROPERTIES:\n:a: b\n:c: d\n:END:"
|
||||
|
||||
(gp-property/insert-property test-db :org "hello
|
||||
DEADLINE: <2021-10-25 Mon>
|
||||
SCHEDULED: <2021-10-25 Mon>\n:PROPERTIES:\n:a: b\n:END:\nworld\n" "c" "d")
|
||||
"hello\nDEADLINE: <2021-10-25 Mon>\nSCHEDULED: <2021-10-25 Mon>\n:PROPERTIES:\n:a: b\n:c: d\n:END:\nworld"
|
||||
|
||||
(gp-property/insert-property test-db :markdown "hello\na:: b\nworld\n" "c" "d")
|
||||
"hello\na:: b\nc:: d\nworld"
|
||||
|
||||
(gp-property/insert-property test-db :markdown "> quote" "c" "d")
|
||||
"c:: d\n> quote"
|
||||
|
||||
(gp-property/insert-property test-db :markdown "#+BEGIN_QUOTE
|
||||
hello world
|
||||
#+END_QUOTE" "c" "d")
|
||||
"c:: d\n#+BEGIN_QUOTE\n hello world\n #+END_QUOTE"))
|
||||
|
||||
(deftest test-insert-properties
|
||||
(are [x y] (= x y)
|
||||
(gp-property/insert-properties test-db :markdown "" {:foo "bar"})
|
||||
"foo:: bar"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "" {"foo" "bar"})
|
||||
"foo:: bar"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "" {"foo space" "bar"})
|
||||
"foo-space:: bar"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "" {:foo #{"bar" "baz"}})
|
||||
"foo:: [[bar]], [[baz]]"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "" {:foo ["bar" "bar" "baz"]})
|
||||
"foo:: [[bar]], [[baz]]"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "a\nb\n" {:foo ["bar" "bar" "baz"]})
|
||||
"a\nfoo:: [[bar]], [[baz]]\nb"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "" {:foo "\"bar, baz\""})
|
||||
"foo:: \"bar, baz\""
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "abcd\nempty::" {:id "123" :foo "bar"})
|
||||
"abcd\nempty::\nid:: 123\nfoo:: bar"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "abcd\nempty:: " {:id "123" :foo "bar"})
|
||||
"abcd\nempty:: \nid:: 123\nfoo:: bar"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "abcd\nempty::" {:id "123"})
|
||||
"abcd\nempty::\nid:: 123"
|
||||
|
||||
(gp-property/insert-properties test-db :markdown "abcd\nempty::\nanother-empty::" {:id "123"})
|
||||
"abcd\nempty::\nanother-empty::\nid:: 123"))
|
||||
|
||||
(deftest test-remove-properties
|
||||
(testing "properties with non-blank lines"
|
||||
(are [x y] (= x y)
|
||||
|
||||
deps/outliner/src/logseq/outliner/core.cljs | 41
@@ -5,7 +5,6 @@
|
||||
[clojure.walk :as walk]
|
||||
[datascript.core :as d]
|
||||
[datascript.impl.entity :as de :refer [Entity]]
|
||||
[logseq.common.config :as common-config]
|
||||
[logseq.common.util :as common-util]
|
||||
[logseq.common.util.page-ref :as page-ref]
|
||||
[logseq.common.uuid :as common-uuid]
|
||||
@@ -14,8 +13,6 @@
|
||||
[logseq.db.frontend.class :as db-class]
|
||||
[logseq.db.frontend.schema :as db-schema]
|
||||
[logseq.db.sqlite.create-graph :as sqlite-create-graph]
|
||||
[logseq.db.sqlite.util :as sqlite-util]
|
||||
[logseq.graph-parser.block :as gp-block]
|
||||
[logseq.outliner.batch-tx :include-macros true :as batch-tx]
|
||||
[logseq.outliner.datascript :as ds]
|
||||
[logseq.outliner.pipeline :as outliner-pipeline]
|
||||
@@ -130,43 +127,9 @@
|
||||
|
||||
(declare move-blocks)
|
||||
|
||||
(defn- file-rebuild-block-refs
|
||||
[repo db date-formatter {:block/keys [properties] :as block}]
|
||||
(let [property-key-refs (->> (keys properties)
|
||||
(keep (fn [property-id]
|
||||
(:block/uuid (ldb/get-page db (name property-id))))))
|
||||
property-value-refs (->> (vals properties)
|
||||
(mapcat (fn [v]
|
||||
(cond
|
||||
(and (coll? v) (uuid? (first v)))
|
||||
v
|
||||
|
||||
(uuid? v)
|
||||
(when-let [_entity (d/entity db [:block/uuid v])]
|
||||
[v])
|
||||
|
||||
(and (coll? v) (string? (first v)))
|
||||
(mapcat #(gp-block/extract-refs-from-text repo db % date-formatter) v)
|
||||
|
||||
(string? v)
|
||||
(gp-block/extract-refs-from-text repo db v date-formatter)
|
||||
|
||||
:else
|
||||
nil))))
|
||||
property-refs (->> (concat property-key-refs property-value-refs)
|
||||
(map (fn [id-or-map] (if (uuid? id-or-map) {:block/uuid id-or-map} id-or-map)))
|
||||
(remove (fn [b] (nil? (d/entity db [:block/uuid (:block/uuid b)])))))
|
||||
content-refs (when-let [content (:block/title block)]
|
||||
(let [format (or (:block/format block) :markdown)
|
||||
content' (str (common-config/get-block-pattern format) " " content)]
|
||||
(gp-block/extract-refs-from-text repo db content' date-formatter)))]
|
||||
(concat property-refs content-refs)))
|
||||
|
||||
(defn ^:api rebuild-block-refs
|
||||
[repo db date-formatter block]
|
||||
(if (sqlite-util/db-based-graph? repo)
|
||||
(outliner-pipeline/db-rebuild-block-refs db block)
|
||||
(file-rebuild-block-refs repo db date-formatter block)))
|
||||
[db block]
|
||||
(outliner-pipeline/db-rebuild-block-refs db block))
|
||||
|
||||
(defn- fix-tag-ids
|
||||
"Fix or remove tags related when entered via `Escape`"
|
||||
|
||||
@@ -78,7 +78,6 @@
|
||||
["deps/db/src/logseq/db/file_based"
|
||||
"deps/graph-parser/src/logseq/graph_parser/extract.cljc"
|
||||
"deps/graph-parser/src/logseq/graph_parser/property.cljs"
|
||||
"deps/graph-parser/src/logseq/graph_parser.cljs"
|
||||
"src/main/frontend/fs"
|
||||
"src/main/frontend/util/fs.cljs"])
|
||||
|
||||
|
||||
@@ -246,9 +246,7 @@
|
||||
(when-not @*publishing? (common-sqlite/create-kvs-table! client-ops-db))
|
||||
(rtc-debug-log/create-tables! debug-log-db)
|
||||
(search/create-tables-and-triggers! search-db)
|
||||
(ldb/register-transact-pipeline-fn!
|
||||
(fn [tx-report]
|
||||
(worker-pipeline/transact-pipeline repo tx-report)))
|
||||
(ldb/register-transact-pipeline-fn! worker-pipeline/transact-pipeline)
|
||||
(let [schema (ldb/get-schema repo)
|
||||
conn (common-sqlite/get-storage-conn storage schema)
|
||||
_ (db-fix/check-and-fix-schema! repo conn)
|
||||
|
||||
@@ -35,14 +35,13 @@
|
||||
(:undo? tx-meta) (:redo? tx-meta)))))
|
||||
|
||||
(defn- rebuild-block-refs
|
||||
[repo {:keys [tx-meta db-after db-before]} blocks]
|
||||
[{:keys [tx-meta db-after db-before]} blocks]
|
||||
(when (or (and (:outliner-op tx-meta) (refs-need-recalculated? tx-meta))
|
||||
(:rtc-tx? tx-meta)
|
||||
(:rtc-op? tx-meta))
|
||||
(mapcat (fn [block]
|
||||
(when (d/entity db-after (:db/id block))
|
||||
(let [date-formatter (worker-state/get-date-formatter repo)
|
||||
refs (->> (outliner-core/rebuild-block-refs repo db-after date-formatter block) set)
|
||||
(let [refs (->> (outliner-core/rebuild-block-refs db-after block) set)
|
||||
old-refs (->> (:block/refs (d/entity db-before (:db/id block)))
|
||||
(map :db/id)
|
||||
set)
|
||||
@@ -464,7 +463,7 @@
|
||||
(defn transact-pipeline
|
||||
"Compute extra tx-data and block/refs, should ensure it's a pure function and
|
||||
doesn't call `d/transact!` or `ldb/transact!`."
|
||||
[repo {:keys [db-after tx-meta] :as tx-report}]
|
||||
[{:keys [db-after tx-meta] :as tx-report}]
|
||||
(let [extra-tx-data (compute-extra-tx-data tx-report)
|
||||
tx-report* (if (seq extra-tx-data)
|
||||
(let [result (d/with db-after extra-tx-data)]
|
||||
@@ -477,7 +476,7 @@
|
||||
deleted-block-ids (set (map :db/id deleted-blocks))
|
||||
blocks' (remove (fn [b] (deleted-block-ids (:db/id b))) blocks)
|
||||
block-refs (when (seq blocks')
|
||||
(rebuild-block-refs repo tx-report* blocks'))
|
||||
(rebuild-block-refs tx-report* blocks'))
|
||||
tx-id-data (let [db-after (:db-after tx-report*)
|
||||
updated-blocks (remove (fn [b] (contains? deleted-block-ids (:db/id b)))
|
||||
(concat pages blocks))
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
(ns frontend.format.mldoc-test
|
||||
(:require [logseq.graph-parser.block :as gp-block]
|
||||
[cljs.test :refer [deftest testing are]]
|
||||
[frontend.test.helper :as test-helper]))
|
||||
|
||||
(deftest test-extract-plain
|
||||
(testing "normalize date values"
|
||||
(are [x y] (= (gp-block/extract-plain test-helper/test-db x) y)
|
||||
"foo #book #[[nice test]]"
|
||||
"foo"
|
||||
|
||||
"foo #book #[[nice test]]"
|
||||
"foo"
|
||||
|
||||
"**foo** #book #[[nice test]]"
|
||||
"foo"
|
||||
|
||||
"foo [[bar]] #book #[[nice test]]"
|
||||
"foo [[bar]]"
|
||||
|
||||
"foo [[bar]] #book #[[nice test]]"
|
||||
"foo [[bar]]"
|
||||
|
||||
"[[foo bar]]"
|
||||
"foo bar"
|
||||
|
||||
"[[Foo Bar]]"
|
||||
"Foo Bar"
|
||||
|
||||
"[[Foo [[Bar]]]]"
|
||||
"Foo [[Bar]]"
|
||||
|
||||
"foo [[Foo [[Bar]]]]"
|
||||
"foo [[Foo [[Bar]]]]"
|
||||
|
||||
"foo [[Foo [[Bar]]]] #tag"
|
||||
"foo [[Foo [[Bar]]]]")))
|
||||
@@ -35,9 +35,7 @@
|
||||
[& {:keys [build-init-data?] :or {build-init-data? true} :as opts}]
|
||||
(state/set-current-repo! test-db)
|
||||
(conn/start! test-db opts)
|
||||
(ldb/register-transact-pipeline-fn!
|
||||
(fn [tx-report]
|
||||
(worker-pipeline/transact-pipeline test-db tx-report)))
|
||||
(ldb/register-transact-pipeline-fn! worker-pipeline/transact-pipeline)
|
||||
(let [conn (conn/get-db test-db false)]
|
||||
(when build-init-data? (d/transact! conn (sqlite-create-graph/build-db-initial-data config/config-default-content)))
|
||||
(d/listen! conn ::listen-db-changes!
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
(ns frontend.worker.pipeline-test
|
||||
(:require [cljs.test :refer [deftest is testing]]
|
||||
[datascript.core :as d]
|
||||
[frontend.test.helper :as test-helper]
|
||||
[frontend.worker.pipeline :as worker-pipeline]
|
||||
[logseq.db :as ldb]
|
||||
[logseq.db.test.helper :as db-test]))
|
||||
@@ -42,8 +41,7 @@
|
||||
(set (#'worker-pipeline/remove-conflict-datoms datoms)))))))
|
||||
|
||||
(deftest test-built-in-page-updates-that-should-be-reverted
|
||||
(let [graph test-helper/test-db
|
||||
conn (db-test/create-conn-with-blocks
|
||||
(let [conn (db-test/create-conn-with-blocks
|
||||
[{:page {:block/title "page1"}
|
||||
:blocks [{:block/title "b1"}
|
||||
{:block/title "b2" :build/tags [:tag1]}]}])
|
||||
@@ -51,7 +49,7 @@
|
||||
|
||||
(ldb/register-transact-pipeline-fn!
|
||||
(fn [tx-report]
|
||||
(worker-pipeline/transact-pipeline graph tx-report)))
|
||||
(worker-pipeline/transact-pipeline tx-report)))
|
||||
|
||||
(testing "Using built-in pages as tags"
|
||||
(let [page-1 (ldb/get-page @conn "page1")
|
||||
@@ -95,8 +93,7 @@
|
||||
(ldb/register-transact-pipeline-fn! identity)))
|
||||
|
||||
(deftest ensure-query-property-on-tag-additions-test
|
||||
(let [graph test-helper/test-db
|
||||
conn (db-test/create-conn-with-blocks
|
||||
(let [conn (db-test/create-conn-with-blocks
|
||||
{:pages-and-blocks [{:page {:block/title "page1"}
|
||||
:blocks [{:block/title "b1"}
|
||||
{:block/title "b2"}]}]
|
||||
@@ -106,9 +103,7 @@
|
||||
b1 (some #(when (= "b1" (:block/title %)) %) blocks)
|
||||
b2 (some #(when (= "b2" (:block/title %)) %) blocks)
|
||||
query-child (ldb/get-page @conn "QueryChild")]
|
||||
(ldb/register-transact-pipeline-fn!
|
||||
(fn [tx-report]
|
||||
(worker-pipeline/transact-pipeline graph tx-report)))
|
||||
(ldb/register-transact-pipeline-fn! worker-pipeline/transact-pipeline)
|
||||
|
||||
(testing "tagging with #Query adds query property"
|
||||
(ldb/transact! conn [[:db/add (:db/id b1) :block/tags :logseq.class/Query]])