Merge pull request #5420 from logseq/enhance/graph-parser-part-four

Enhance: Finish up graph-parser
This commit is contained in:
Tienson Qin
2022-06-02 10:00:30 +08:00
committed by GitHub
81 changed files with 1536 additions and 851 deletions

View File

@@ -5,7 +5,5 @@
;; Ignore b/c too many false positives
frontend.db
;; Used for debugging
frontend.db.debug
;; carve doesn't detect nbb only usage
logseq.graph-parser.log]
frontend.db.debug]
:report {:format :ignore}}

View File

@@ -2,13 +2,7 @@
{:unresolved-symbol {:exclude [goog.DEBUG
goog.string.unescapeEntities
;; TODO:lint: Fix when fixing all type hints
object
;; TODO: Remove parse-* and update-* when https://github.com/clj-kondo/clj-kondo/issues/1694 is done
parse-long
parse-double
parse-uuid
update-keys
update-vals]}
object]}
;; TODO:lint: Remove node-path excludes once we have a cleaner api
:unresolved-var {:exclude [frontend.util/node-path.basename
frontend.util/node-path.dirname
@@ -26,10 +20,12 @@
frontend.db.query-react query-react
frontend.util util
frontend.util.property property
frontend.util.text text-util
frontend.config config
frontend.format.mldoc mldoc
frontend.format.block block
frontend.handler.extract extract
logseq.graph-parser graph-parser
logseq.graph-parser.text text
logseq.graph-parser.block gp-block
logseq.graph-parser.mldoc gp-mldoc
@@ -39,12 +35,10 @@
logseq.graph-parser.date-time-util date-time-util}}}
:hooks {:analyze-call {rum.core/defc hooks.rum/defc
rum.core/defcs hooks.rum/defcs}}
rum.core/defcs hooks.rum/defcs}}
:lint-as {promesa.core/let clojure.core/let
promesa.core/loop clojure.core/loop
promesa.core/recur clojure.core/recur
garden.def/defstyles clojure.core/def
garden.def/defkeyframes clojure.core/def
rum.core/defcc rum.core/defc
rum.core/defcontext clojure.core/def
clojure.test.check.clojure-test/defspec clojure.core/def
@@ -55,4 +49,5 @@
frontend.namespaces/import-vars potemkin/import-vars
;; src/test
frontend.react/defc clojure.core/defn}
:skip-comments true}
:skip-comments true
:output {:progress true}}

View File

@@ -79,11 +79,9 @@ jobs:
yarn cljs:test
node static/tests.js
- name: Run nbb tests for graph-parser
run: yarn nbb-logseq -cp src/main:src/test -m logseq.graph-parser.nbb-test-runner/run-tests
# In this job because it depends on an npm package
- name: Load nbb compatible namespaces
run: bb test:load-nbb-compatible-namespaces
run: bb test:load-namespaces-with-nbb
lint:
runs-on: ubuntu-latest

113
.github/workflows/graph-parser.yml vendored Normal file
View File

@@ -0,0 +1,113 @@
name: logseq graph-parser CI
on:
# Path filters ensure jobs only kick off if a change is made to graph-parser
push:
branches: [master]
paths:
- 'deps/graph-parser/**'
- '!deps/graph-parser/**.md'
pull_request:
branches: [master]
paths:
- 'deps/graph-parser/**'
- '!deps/graph-parser/**.md'
env:
CLOJURE_VERSION: '1.10.1.727'
# setup-java@v2 dropped support for legacy Java version syntax.
# This is the same as 1.8.
JAVA_VERSION: '8'
# This is the latest node version we can run.
NODE_VERSION: '16'
BABASHKA_VERSION: '0.8.2'
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Node
uses: actions/setup-node@v2
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
cache-dependency-path: deps/graph-parser/yarn.lock
- name: Set up Java
uses: actions/setup-java@v2
with:
distribution: 'zulu'
java-version: ${{ env.JAVA_VERSION }}
- name: Set up Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: ${{ env.CLOJURE_VERSION }}
- name: Setup Babashka
uses: turtlequeue/setup-babashka@v1.3.0
with:
babashka-version: ${{ env.BABASHKA_VERSION }}
- name: Clojure cache
uses: actions/cache@v2
id: clojure-deps
with:
path: |
~/.m2/repository
~/.gitlibs
key: ${{ runner.os }}-clojure-deps-${{ hashFiles('deps.edn') }}
restore-keys: ${{ runner.os }}-clojure-deps-
- name: Fetch Clojure deps
if: steps.clojure-deps.outputs.cache-hit != 'true'
run: cd deps/graph-parser && clojure -A:test -P
- name: Fetch yarn deps
run: cd deps/graph-parser && yarn install --frozen-lockfile
- name: Run ClojureScript tests
run: cd deps/graph-parser && clojure -M:test
- name: Run nbb-logseq tests
run: cd deps/graph-parser && yarn nbb-logseq -cp src:test -m logseq.graph-parser.nbb-test-runner/run-tests
# In this job because it depends on an npm package
- name: Load namespaces into nbb-logseq
run: bb test:load-all-namespaces-with-nbb deps/graph-parser src
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Java
uses: actions/setup-java@v2
with:
distribution: 'zulu'
java-version: ${{ env.JAVA_VERSION }}
- name: Set up Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: ${{ env.CLOJURE_VERSION }}
- name: Setup Babashka
uses: turtlequeue/setup-babashka@v1.3.0
with:
babashka-version: ${{ env.BABASHKA_VERSION }}
- name: Run clj-kondo lint
run: cd deps/graph-parser && clojure -M:clj-kondo --parallel --lint src test
- name: Carve lint for unused vars
run: cd deps/graph-parser && ../../scripts/carve.clj
- name: Lint for vars that are too large
run: scripts/large_vars.clj deps/graph-parser/src '{:max-lines-count 75}'

View File

@@ -46,7 +46,11 @@ After cloning the [Logseq repository](https://github.com/logseq/logseq), there a
- `src/main/frontend/` contains code that powers the Logseq editor. Folders and files inside are organized by features or functions. For example, `components` contains all the UI components and `handler` contains all the event-handling code. You can explore on your own interest.
- `src/main/logseq/` contains the api used by plugins and the graph-parser.
- `src/main/logseq/` contains the api used by plugins.
- `deps/` contains dependencies or libraries used by the frontend.
- `deps/graph-parser/` is a library that parses a Logseq graph and saves it to a database.
## Data Flow

22
bb.edn
View File

@@ -2,9 +2,16 @@
:deps
{org.babashka/spec.alpha
{:git/url "https://github.com/babashka/spec.alpha"
:sha "1a841c4cc1d4f6dab7505a98ed2d532dd9d56b78"}}
:sha "1a841c4cc1d4f6dab7505a98ed2d532dd9d56b78"}
logseq/bb-tasks
#_{:local/root "../bb-tasks"}
{:git/url "https://github.com/logseq/bb-tasks"
:git/sha "4b3e623fb475cacb992425aa9dac770d6dd63e38"}
logseq/graph-parser
{:local/root "deps/graph-parser"}}
:pods
{clj-kondo/clj-kondo {:version "2022.02.09"}}
{clj-kondo/clj-kondo {:version "2022.02.09"}
org.babashka/fswatcher {:version "0.0.3"}}
:tasks
{dev:watch
logseq.tasks.dev/watch
@@ -26,9 +33,18 @@
dev:lint
logseq.tasks.dev/lint
test:load-nbb-compatible-namespaces
nbb:watch
logseq.bb-tasks.nbb.watch/watch
nbb:portal-watch
logseq.bb-tasks.nbb.watch/portal-watch
test:load-namespaces-with-nbb
logseq.tasks.nbb/load-compatible-namespaces
test:load-all-namespaces-with-nbb
logseq.tasks.nbb/load-all-namespaces
lang:list
logseq.tasks.lang/list-langs

View File

@@ -29,8 +29,8 @@
camel-snake-kebab/camel-snake-kebab {:mvn/version "0.4.2"}
instaparse/instaparse {:mvn/version "1.4.10"}
nubank/workspaces {:mvn/version "1.1.1"}
frankiesardo/linked {:mvn/version "1.3.0"}
org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}}
org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}
logseq/graph-parser {:local/root "deps/graph-parser"}}
:aliases {:cljs {:extra-paths ["src/dev-cljs/" "src/test/" "src/electron/"]
:extra-deps {org.clojure/clojurescript {:mvn/version "1.11.54"}
@@ -47,5 +47,5 @@
:main-opts ["-m" "shadow.cljs.devtools.cli"]}
;; Use :replace-deps for tools. See https://github.com/clj-kondo/clj-kondo/issues/1536#issuecomment-1013006889
:clj-kondo {:replace-deps {clj-kondo/clj-kondo {:mvn/version "2022.01.13"}}
:clj-kondo {:replace-deps {clj-kondo/clj-kondo {:mvn/version "2022.05.28"}}
:main-opts ["-m" "clj-kondo.main"]}}}

9
deps/graph-parser/.carve/config.edn vendored Normal file
View File

@@ -0,0 +1,9 @@
{:paths ["src"]
:api-namespaces [
;; carve doesn't detect nbb only usage
logseq.graph-parser.log
;; Used by logseq but not worth splitting up
logseq.graph-parser.db.schema
;; Used in tests
logseq.graph-parser.test.docs-graph-helper]
:report {:format :ignore}}

4
deps/graph-parser/.carve/ignore vendored Normal file
View File

@@ -0,0 +1,4 @@
;; For CLI
logseq.graph-parser.cli/parse-graph
;; For CLI
logseq.graph-parser.mldoc/ast-export-markdown

13
deps/graph-parser/.clj-kondo/config.edn vendored Normal file
View File

@@ -0,0 +1,13 @@
{:linters
{:consistent-alias
{:aliases {datascript.core d
logseq.graph-parser graph-parser
logseq.graph-parser.text text
logseq.graph-parser.block gp-block
logseq.graph-parser.mldoc gp-mldoc
logseq.graph-parser.util gp-util
logseq.graph-parser.property gp-property
logseq.graph-parser.config gp-config
logseq.graph-parser.date-time-util date-time-util}}}
:skip-comments true
:output {:progress true}}

3
deps/graph-parser/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/.clj-kondo/.cache
cljs-test-runner-out
/test/docs

63
deps/graph-parser/README.md vendored Normal file
View File

@@ -0,0 +1,63 @@
## Description
This library parses a logseq graph directory and returns it as a datascript
database connection. This library powers the Logseq app and also runs from the
commandline, _independent_ of the app. This is powerful because it can run
anywhere that a Node.js script has access to a Logseq graph, e.g. in CI
processes like GitHub Actions. This library is compatible with ClojureScript and with
[nbb-logseq](https://github.com/logseq/nbb-logseq) to respectively provide
frontend and commandline functionality.
## API
This library is under the parent namespace `logseq.graph-parser`. This library
provides two main namespaces for parsing, `logseq.graph-parser` and
`logseq.graph-parser.cli`. `logseq.graph-parser/parse-file` is the main fn for
the frontend. `logseq.graph-parser.cli/parse-graph` is the main fn for node.js
CLIs.
## Usage
See `logseq.graph-parser.cli-test` for now. A real world example is coming soon.
## Dev
This follows the practices that [the Logseq frontend
follows](/docs/dev-practices.md). Most of the same linters are used, with
configurations that are specific to this library. See [this library's CI
file](/.github/workflows/graph-parser.yml) for linting examples.
### Setup
To run linters and tests, you'll want to install yarn dependencies once:
```
yarn install
```
This step is not needed if you're just running the application.
### Testing
Since this library is compatible with both ClojureScript and nbb-logseq, tests are run in both environments.
ClojureScript tests use https://github.com/Olical/cljs-test-runner. To run tests:
```
clojure -M:test
```
To see available options that can run specific tests or namespaces: `clojure -M:test --help`
To run nbb-logseq tests:
```
yarn nbb-logseq -cp src:test -m logseq.graph-parser.nbb-test-runner/run-tests
```
### Managing dependencies
The package.json dependencies are just for testing and should be updated if there is
new behavior to test.
The deps.edn dependencies are used by both ClojureScript and nbb-logseq. Their
versions should be backwards compatible with each other with priority given to
the frontend. _No new dependency_ should be introduced to this library without
an understanding of the tradeoffs of adding this to nbb-logseq.

23
deps/graph-parser/deps.edn vendored Normal file
View File

@@ -0,0 +1,23 @@
{:paths ["src"]
:deps
;; Deps should be kept in sync with https://github.com/logseq/nbb-logseq/blob/main/bb.edn
{datascript/datascript {:mvn/version "1.3.8"}
frankiesardo/linked {:mvn/version "1.3.0"}
com.andrewmcveigh/cljs-time {:git/url "https://github.com/logseq/cljs-time" ;; fork
:sha "5704fbf48d3478eedcf24d458c8964b3c2fd59a9"}
;; stubbed in nbb
com.lambdaisland/glogi {:mvn/version "1.1.144"}
;; built in to nbb
cljs-bean/cljs-bean {:mvn/version "1.5.0"}}
:aliases
;; This runs tests with nodejs. Would be nice to run this with in a browser env
;; since this is how its normally run in the app but this requires more setup
;; with karma, shadow-cljs.edn and headless mode on CI
{:test {:extra-paths ["test"]
:extra-deps {olical/cljs-test-runner {:mvn/version "3.8.0"}
org.clojure/clojurescript {:mvn/version "1.11.54"}}
:main-opts ["-m" "cljs-test-runner.main"]}
:clj-kondo {:replace-deps {clj-kondo/clj-kondo {:mvn/version "2022.05.28"}}
:main-opts ["-m" "clj-kondo.main"]}}}

11
deps/graph-parser/package.json vendored Normal file
View File

@@ -0,0 +1,11 @@
{
"name": "@logseq/graph-parser",
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^0.5.103"
},
"dependencies": {
"mldoc": "^1.3.3"
}
}

View File

@@ -0,0 +1,70 @@
(ns logseq.graph-parser
"Main ns used by logseq app to parse graph from source files"
(:require [datascript.core :as d]
[logseq.graph-parser.extract :as extract]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.date-time-util :as date-time-util]
[logseq.graph-parser.config :as gp-config]
[clojure.string :as string]
[clojure.set :as set]))
(defn- db-set-file-content!
  "Stores `content` for `path` in the db connection `conn`.
  Modified copy of frontend.db.model/db-set-file-content!"
  [conn path content]
  ;; :skip-refresh? mirrors the frontend original's transact metadata
  (d/transact! conn
               [{:file/path path
                 :file/content content}]
               {:skip-refresh? true}))
(defn parse-file
  "Parse file and save parsed data to the given db. Main parse fn used by logseq app.

  Options:
  * :new? - when true (default) stamps :file/created-at on the file entity
  * :delete-blocks-fn - fn of [first-page file] returning tx data for blocks to
    delete before re-adding; defaults to returning no deletions
  * :extract-options - extra options merged into the extract step (overrides the
    defaults below except :db)
  * :new-graph?, :from-disk? - passed through as d/transact! metadata"
  [conn file content {:keys [new? delete-blocks-fn extract-options]
                      :or {new? true
                           delete-blocks-fn (constantly [])}
                      :as options}]
  ;; Always record the raw file contents first
  (db-set-file-content! conn file content)
  (let [format (gp-util/get-format file)
        file-content [{:file/path file}]
        ;; Only markdown/org formats are parsed into pages and blocks; other
        ;; formats just get the file entity transacted
        tx (if (contains? gp-config/mldoc-support-formats format)
             (let [extract-options' (merge {:block-pattern (gp-config/get-block-pattern format)
                                            :date-formatter "MMM do, yyyy"
                                            :supported-formats (gp-config/supported-formats)}
                                           extract-options
                                           {:db @conn})
                   [pages blocks]
                   (extract/extract-blocks-pages file content extract-options')
                   delete-blocks (delete-blocks-fn (first pages) file)
                   block-ids (map (fn [block] {:block/uuid (:block/uuid block)}) blocks)
                   ;; uuids referenced via [:block/uuid ...] lookup refs inside blocks
                   block-refs-ids (->> (mapcat :block/refs blocks)
                                       (filter (fn [ref] (and (vector? ref)
                                                              (= :block/uuid (first ref)))))
                                       (map (fn [ref] {:block/uuid (second ref)}))
                                       (seq))
                   ;; To prevent "unique constraint" on datascript
                   block-ids (set/union (set block-ids) (set block-refs-ids))
                   pages (extract/with-ref-pages pages blocks)
                   pages-index (map #(select-keys % [:block/name]) pages)]
               ;; does order matter?
               (concat file-content pages-index delete-blocks pages block-ids blocks))
             file-content)
        tx (concat tx [(cond-> {:file/path file}
                         new?
                         ;; TODO: use file system timestamp?
                         (assoc :file/created-at (date-time-util/time-ms)))])]
    (d/transact! conn (gp-util/remove-nils tx) (select-keys options [:new-graph? :from-disk?]))))
(defn filter-files
  "Filters files in preparation for parsing. Only includes files that are
  supported by parser. Returns files ordered so that journals come first
  (reversed, so presumably newest first -- TODO confirm), then built-in files
  (contents page, .edn configs, custom.css), then everything else."
  [files]
  (let [;; keep only parseable formats plus :edn and :css
        support-files (filter
                       (fn [file]
                         (let [format (gp-util/get-format (:file/path file))]
                           (contains? (set/union #{:edn :css} gp-config/mldoc-support-formats) format)))
                       files)
        support-files (sort-by :file/path support-files)
        ;; journal pages live under a "journals/" path segment
        {journals true non-journals false} (group-by (fn [file] (string/includes? (:file/path file) "journals/")) support-files)
        ;; NOTE(review): substring matches like ".edn" or "contents." will also
        ;; match anywhere in a path, not only the intended built-in files
        {built-in true others false} (group-by (fn [file]
                                                 (or (string/includes? (:file/path file) "contents.")
                                                     (string/includes? (:file/path file) ".edn")
                                                     (string/includes? (:file/path file) "custom.css"))) non-journals)]
    (concat (reverse journals) built-in others)))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.block
(ns logseq.graph-parser.block
;; Disable clj linters since we don't support clj
#?(:clj {:clj-kondo/config {:linters {:unresolved-namespace {:level :off}
:unresolved-symbol {:level :off}}}})

View File

@@ -0,0 +1,68 @@
(ns logseq.graph-parser.cli
"Primary ns to parse graphs with node.js based CLIs"
(:require ["fs" :as fs]
["child_process" :as child-process]
[clojure.edn :as edn]
[clojure.string :as string]
[logseq.graph-parser :as graph-parser]
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser.db :as gp-db]))
(defn slurp
  "Reads the file at `file` synchronously and returns its contents as a
  string, mirroring clojure.core/slurp. Shadows clojure.core/slurp in this ns."
  [file]
  (-> file fs/readFileSync str))
(defn sh
  "Run shell cmd synchronously and print to inherited streams by default. Aims
  to be similar to babashka.tasks/shell.
  `cmd` is a sequence: its first element is the program, the rest its args.
  `opts` is merged over the defaults, e.g. {:stdio nil} to capture output.
  Returns the spawnSync result object.
  TODO: Fail fast when process exits 1"
  [cmd opts]
  (child-process/spawnSync (first cmd)
                           (clj->js (rest cmd))
                           (clj->js (merge {:stdio "inherit"} opts))))
(defn build-graph-files
  "Given a git graph directory, returns allowed file paths and their contents in
  preparation for parsing. Uses `git ls-files` so only tracked files are
  considered. Returns a vector of maps with :file/path and :file/content."
  [dir]
  (let [;; :stdio nil so stdout is captured instead of inherited
        files (->> (str (.-stdout (sh ["git" "ls-files"]
                                      {:cwd dir :stdio nil})))
                   string/split-lines
                   (map #(hash-map :file/path (str dir "/" %)))
                   graph-parser/filter-files)]
    (mapv #(assoc % :file/content (slurp (:file/path %))) files)))
(defn- read-config
  "Commandline version of frontend.handler.common/read-config without graceful
  handling of broken config. Config is assumed to be at $dir/logseq/config.edn.
  Returns {} when the config file does not exist."
  [dir]
  (let [config-file (str dir "/" gp-config/app-name "/config.edn")]
    (if (fs/existsSync config-file)
      (-> config-file fs/readFileSync str edn/read-string)
      {})))
(defn- parse-files
  "Parses each file map (with :file/path and :file/content) into `conn`.
  The graph's :config supplies the date formatter; :verbose is forwarded to
  the extract step."
  [conn files {:keys [config] :as options}]
  (let [extract-options (merge {:date-formatter (gp-config/get-date-formatter config)}
                               (select-keys options [:verbose]))]
    (doseq [{:file/keys [path content]} files]
      (graph-parser/parse-file conn path content {:extract-options extract-options}))))
(defn parse-graph
  "Parses a given graph directory and returns a datascript connection and all
  files that were processed. The directory is parsed as if it were a new graph
  as it can't assume that the metadata in logseq/ is up to date. Directory is
  assumed to be using git. This fn takes the following options:
  * :verbose - When enabled prints more information during parsing. Defaults to true
  * :files - Specific files to parse instead of parsing the whole directory

  Returns a map of :conn (datascript connection) and :files (paths parsed)."
  ([dir]
   (parse-graph dir {}))
  ([dir options]
   (let [;; explicit :files wins over scanning the directory with git
         files (or (:files options) (build-graph-files dir))
         conn (gp-db/start-conn)
         config (read-config dir)]
     ;; only announce the count for full-directory parses
     (when-not (:files options) (println "Parsing" (count files) "files..."))
     (parse-files conn files (merge options {:config config}))
     {:conn conn
      :files (map :file/path files)})))

View File

@@ -0,0 +1,60 @@
(ns logseq.graph-parser.config
"Config that is shared between graph-parser and rest of app"
(:require [logseq.graph-parser.util :as gp-util]
[clojure.set :as set]
[clojure.string :as string]))
(def app-name
  "Copy of frontend.config/app-name. Too small to couple to main app"
  "logseq")

;; Directory name where a graph stores its asset files
(defonce local-assets-dir "assets")

(defn local-asset?
  "True when `s` is a (possibly relative, e.g. ../assets/...) path into the
  local assets directory."
  [s]
  (gp-util/safe-re-find (re-pattern (str "^[./]*" local-assets-dir)) s))
;; Directory name where a graph stores its drawings
(defonce default-draw-directory "draws")

(defn draw?
  "True when `path` starts with the draws directory."
  [path]
  (string/starts-with? path default-draw-directory))

;; TODO: rename
;; File formats that the mldoc parser understands
(defonce mldoc-support-formats
  #{:org :markdown :md})

(defn mldoc-support?
  "True when `format` (keyword or string) is parseable by mldoc."
  [format]
  (contains? mldoc-support-formats (keyword format)))
(defn text-formats
  "Set of file extensions treated as text files."
  []
  #{:json :org :md :yml :dat :asciidoc :rst :txt :markdown :adoc :html :js :ts :edn :clj :ml :rb :ex :erl :java :php :c :css
    :excalidraw})

(defn img-formats
  "Set of file extensions treated as images."
  []
  #{:gif :svg :jpeg :ico :png :jpg :bmp :webp})

(defn supported-formats
  "All file extensions the parser accepts: text formats plus image formats."
  []
  ;; pour the image formats into the text-format set; equivalent to set/union
  (into (text-formats) (img-formats)))
(defn get-date-formatter
  "Returns the journal date format configured in `config`.
  Prefers :journal/page-title-format, falls back to the legacy
  :date-formatter key, and finally to the default \"MMM do, yyyy\"."
  [config]
  (some identity
        [(:journal/page-title-format config)
         ;; for compatibility
         (:date-formatter config)
         "MMM do, yyyy"]))
(defn get-block-pattern
  "Returns the bullet string used for blocks in the given `format`
  (keyword or string). Org files use \"*\"; every other format uses \"-\"."
  [format]
  (if (= :org (keyword format))
    "*"
    "-"))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.date-time-util
(ns logseq.graph-parser.date-time-util
"cljs-time util fns for graph-parser"
(:require [cljs-time.coerce :as tc]
[cljs-time.core :as t]
@@ -25,7 +25,7 @@
(fn [formatter]
(try
(tf/parse (tf/formatter formatter) (gp-util/capitalize-all journal-title))
(catch js/Error _e
(catch :default _e
nil)))
formatters)
(filter some?)

View File

@@ -0,0 +1,15 @@
(ns logseq.graph-parser.db
(:require [logseq.graph-parser.db.default :as default-db]
[logseq.graph-parser.db.schema :as db-schema]
[datascript.core :as d]))
(defn start-conn
  "Create datascript conn with schema and default data.
  Seeds the db with the schema version, the built-in \"card\" page used by
  flashcards, and the default built-in pages."
  []
  (let [db-conn (d/create-conn db-schema/schema)]
    (d/transact! db-conn [{:schema/version db-schema/version}
                          {:block/name "card"
                           :block/original-name "card"
                           :block/uuid (d/squuid)}])
    (d/transact! db-conn default-db/built-in-pages)
    db-conn))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible frontend.db.default
(ns logseq.graph-parser.db.default
(:require [clojure.string :as string]))
(defonce built-in-pages-names

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible frontend.db-schema)
(ns logseq.graph-parser.db.schema)
(defonce version 1)
(defonce ast-version 1)

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.extract
(ns logseq.graph-parser.extract
;; Disable clj linters since we don't support clj
#?(:clj {:clj-kondo/config {:linters {:unresolved-namespace {:level :off}
:unresolved-symbol {:level :off}}}})
@@ -39,12 +39,57 @@
(or first-block-name file-name)
(or file-name first-block-name)))))))
(defn- build-page-entity
  "Builds the tx map for a page entity from its `properties`, source `file`,
  sanitized `page-name`, original `page` name and the `ref-tags` atom that
  accumulates tag names across pages. :date-formatter and :db come from the
  extract options."
  [properties file page-name page ref-tags {:keys [date-formatter db]}]
  (let [alias (:alias properties)
        ;; a single string alias is normalized to a vector
        alias (if (string? alias) [alias] alias)
        ;; drop aliases that are blank or just this page's own name
        aliases (and alias
                     (seq (remove #(or (= page-name (gp-util/page-name-sanity-lc %))
                                       (string/blank? %)) ;; disable blank alias
                                  alias)))
        ;; NOTE(review): the fn params below shadow the outer `alias`,
        ;; `page-name` and `aliases` bindings intentionally
        aliases (->>
                 (map
                  (fn [alias]
                    (let [page-name (gp-util/page-name-sanity-lc alias)
                          ;; each alias page points back at the page itself and
                          ;; at all the other aliases
                          aliases (distinct
                                   (conj
                                    (remove #{alias} aliases)
                                    page))
                          aliases (when (seq aliases)
                                    (map
                                     (fn [alias]
                                       {:block/name (gp-util/page-name-sanity-lc alias)})
                                     aliases))]
                      (if (seq aliases)
                        {:block/name page-name
                         :block/alias aliases}
                        {:block/name page-name})))
                  aliases)
                 (remove nil?))]
    (cond->
     (gp-util/remove-nils
      (assoc
       (gp-block/page-name->map page false db true date-formatter)
       :block/file {:file/path (gp-util/path-normalize file)}))
      (seq properties)
      (assoc :block/properties properties)
      (seq aliases)
      (assoc :block/alias aliases)
      ;; tags are also added to the shared ref-tags atom as a side effect
      (:tags properties)
      (assoc :block/tags (let [tags (:tags properties)
                               tags (if (string? tags) [tags] tags)
                               tags (remove string/blank? tags)]
                           (swap! ref-tags set/union (set tags))
                           (map (fn [tag] {:block/name (gp-util/page-name-sanity-lc tag)
                                           :block/original-name tag})
                                tags))))))
;; TODO: performance improvement
(defn- extract-pages-and-blocks
[format ast properties file content {:keys [date-formatter page-name-order db] :as options}]
(try
#_:clj-kondo/ignore ;;clj-kondo bug
(let [page (get-page-name file ast page-name-order)
[_original-page-name page-name _journal-day] (gp-block/convert-page-if-journal page date-formatter)
blocks (->> (gp-block/extract-blocks ast content false format (dissoc options :page-name-order))
@@ -65,50 +110,7 @@
:block/refs block-ref-pages
:block/path-refs block-path-ref-pages))))
blocks)
page-entity (let [alias (:alias properties)
alias (if (string? alias) [alias] alias)
aliases (and alias
(seq (remove #(or (= page-name (gp-util/page-name-sanity-lc %))
(string/blank? %)) ;; disable blank alias
alias)))
aliases (->>
(map
(fn [alias]
(let [page-name (gp-util/page-name-sanity-lc alias)
aliases (distinct
(conj
(remove #{alias} aliases)
page))
aliases (when (seq aliases)
(map
(fn [alias]
{:block/name (gp-util/page-name-sanity-lc alias)})
aliases))]
(if (seq aliases)
{:block/name page-name
:block/alias aliases}
{:block/name page-name})))
aliases)
(remove nil?))]
(cond->
(gp-util/remove-nils
(assoc
(gp-block/page-name->map page false db true date-formatter)
:block/file {:file/path (gp-util/path-normalize file)}))
(seq properties)
(assoc :block/properties properties)
(seq aliases)
(assoc :block/alias aliases)
(:tags properties)
(assoc :block/tags (let [tags (:tags properties)
tags (if (string? tags) [tags] tags)
tags (remove string/blank? tags)]
(swap! ref-tags set/union (set tags))
(map (fn [tag] {:block/name (gp-util/page-name-sanity-lc tag)
:block/original-name tag})
tags)))))
page-entity (build-page-entity properties file page-name page ref-tags options)
namespace-pages (let [page (:block/original-name page-entity)]
(when (text/namespace-page? page)
(->> (gp-util/split-namespace-pages page)
@@ -137,16 +139,16 @@
(log/error :exception e))))
(defn extract-blocks-pages
[file content {:keys [user-config] :as options}]
[file content {:keys [user-config verbose] :or {verbose true} :as options}]
(if (string/blank? content)
[]
(let [format (gp-util/get-format file)
_ (println "Parsing start: " file)
_ (when verbose (println "Parsing start: " file))
ast (gp-mldoc/->edn content (gp-mldoc/default-config format
;; {:parse_outline_only? true}
)
user-config)]
(println "Parsing finished : " file)
(when verbose (println "Parsing finished: " file))
(let [first-block (ffirst ast)
properties (let [properties (and (gp-property/properties-ast? first-block)
(->> (last first-block)
@@ -184,6 +186,9 @@
(map (partial apply merge))
(with-block-uuid))))
(defn extract-all-block-refs
  "Returns the uuid strings of every ((block-ref)) found in `content`."
  [content]
  (->> content
       (re-seq #"\(\(([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})\)\)")
       ;; each match is [whole-match uuid-group]; keep the uuid group
       (map second)))
#?(:org.babashka/nbb
(alter-var-root #'gp-mldoc/parse-property (constantly text/parse-property))
:default
;; TODO: Properly fix this circular dependency:
;; mldoc/->edn > text/parse-property > mldoc/link? ->mldoc/inline->edn + mldoc/default-config
(set! gp-mldoc/parse-property text/parse-property))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.mldoc
(ns logseq.graph-parser.mldoc
;; Disable clj linters since we don't support clj
#?(:clj {:clj-kondo/config {:linters {:unresolved-namespace {:level :off}
:unresolved-symbol {:level :off}}}})

View File

@@ -2,7 +2,8 @@
"Property fns needed by graph-parser"
(:require [logseq.graph-parser.util :as gp-util]
[clojure.string :as string]
[goog.string :as gstring]))
[goog.string :as gstring]
[goog.string.format]))
(defn properties-ast?
[block]

View File

@@ -0,0 +1,160 @@
(ns logseq.graph-parser.test.docs-graph-helper
"Helper fns for setting up and running tests against docs graph"
(:require ["fs" :as fs]
["child_process" :as child-process]
[cljs.test :refer [is testing]]
[clojure.string :as string]
[logseq.graph-parser.config :as gp-config]
[datascript.core :as d]))
;; Helper fns for test setup
;; =========================
(defn- sh
  "Run shell cmd synchronously and print to inherited streams by default. Aims
  to be similar to babashka.tasks/shell.
  NOTE(review): near-duplicate of logseq.graph-parser.cli/sh -- consider
  sharing one implementation."
  [cmd opts]
  (child-process/spawnSync (first cmd)
                           (clj->js (rest cmd))
                           (clj->js (merge {:stdio "inherit"} opts))))
(defn clone-docs-repo-if-not-exists
  "Shallow-clones the logseq/docs repo at tag v0.6.7 into `dir` unless that
  directory already exists. The tag is pinned so test assertions against the
  graph's contents stay stable."
  [dir]
  (when-not (.existsSync fs dir)
    ;; advice.detachedHead=false silences git's detached-HEAD warning
    (sh ["git" "clone" "--depth" "1" "-b" "v0.6.7" "-c" "advice.detachedHead=false"
         "https://github.com/logseq/docs" dir] {})))
;; Fns for common test assertions
;; ==============================
(defn- get-top-block-properties
  "Returns a map of property key -> usage count across all non-page blocks
  (entities with :block/properties but no :block/name), keeping only
  properties used at least 5 times."
  [db]
  (->> (d/q '[:find (pull ?b [*])
              :where
              [?b :block/properties]
              [(missing? $ ?b :block/name)]]
            db)
       (map first)
       ;; count each property key once per block
       (map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
       (apply merge-with +)
       (filter #(>= (val %) 5))
       (into {})))
(defn- get-all-page-properties
  "Returns a map of property key -> usage count across all pages
  (entities with both :block/properties and :block/name)."
  [db]
  (->> (d/q '[:find (pull ?b [*])
              :where
              [?b :block/properties]
              [?b :block/name]]
            db)
       (map first)
       ;; count each property key once per page
       (map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
       (apply merge-with +)
       (into {})))
(defn- get-block-format-counts
  "Returns a map of :block/format value (e.g. :markdown, :org) -> number of
  blocks with that format."
  [db]
  (->> (d/q '[:find (pull ?b [*]) :where [?b :block/format]] db)
       (map first)
       (group-by :block/format)
       (map (fn [[k v]] [k (count v)]))
       (into {})))
(defn- query-assertions
  "Runs query-based assertions against the parsed docs graph `db`, checking it
  against `files`, the file paths that were parsed. Counts are pinned to the
  docs repo tag cloned by clone-docs-repo-if-not-exists."
  [db files]
  (testing "Query based stats"
    (is (= (->> files
                ;; logseq files aren't saved under :block/file
                (remove #(string/includes? % (str "/" gp-config/app-name "/")))
                set)
           (->> (d/q '[:find (pull ?b [* {:block/file [:file/path]}])
                       :where [?b :block/name] [?b :block/file]]
                     db)
                (map (comp #(get-in % [:block/file :file/path]) first))
                set))
        "Files on disk should equal ones in db")
    (is (= (count (filter #(re-find #"journals/" %) files))
           (->> (d/q '[:find (count ?b)
                       :where
                       [?b :block/journal? true]
                       [?b :block/name]
                       [?b :block/file]]
                     db)
                ffirst))
        "Journal page count on disk equals count in db")
    (is (= {"CANCELED" 2 "DONE" 6 "LATER" 4 "NOW" 5}
           (->> (d/q '[:find (pull ?b [*]) :where [?b :block/marker] ]
                     db)
                (map first)
                (group-by :block/marker)
                (map (fn [[k v]] [k (count v)]))
                (into {})))
        "Task marker counts")
    (is (= {:markdown 3143 :org 460}
           (get-block-format-counts db))
        "Block format counts")
    (is (= {:title 98 :id 98
            :updated-at 47 :created-at 47
            :card-last-score 6 :card-repeats 6 :card-next-schedule 6
            :card-last-interval 6 :card-ease-factor 6 :card-last-reviewed 6
            :alias 6}
           (get-top-block-properties db))
        "Counts for top block properties")
    (is (= {:title 98
            :alias 6
            :tags 2 :permalink 2
            :name 1 :type 1 :related 1 :sample 1 :click 1 :id 1 :example 1}
           (get-all-page-properties db))
        "Counts for all page properties")
    (is (= {:block/scheduled 2
            :block/priority 4
            :block/deadline 1
            :block/collapsed? 22
            :block/heading-level 60
            :block/repeated? 1}
           (->> [:block/scheduled :block/priority :block/deadline :block/collapsed?
                 :block/heading-level :block/repeated?]
                (map (fn [attr]
                       [attr
                        ;; query is built dynamically, one count per attribute
                        (ffirst (d/q [:find (list 'count '?b) :where ['?b attr]]
                                     db))]))
                (into {})))
        "Counts for blocks with common block attributes")
    (is (= #{"term" "setting" "book" "Templates" "Query" "Query/table" "page"}
           (->> (d/q '[:find (pull ?n [*]) :where [?b :block/namespace ?n]] db)
                (map (comp :block/original-name first))
                set))
        "Has correct namespaces")))
(defn docs-graph-assertions
  "These are common assertions that should pass in both graph-parser and main
  logseq app. It is important to run these in both contexts to ensure that the
  functionality in frontend.handler.repo and logseq.graph-parser remain the
  same.
  `db` is the parsed datascript db and `files` the file paths that were parsed."
  [db files]
  ;; Counts assertions help check for no major regressions. These counts should
  ;; only increase over time as the docs graph rarely has deletions
  (testing "Counts"
    (is (= 211 (count files)) "Correct file count")
    (is (= 40943 (count (d/datoms db :eavt))) "Correct datoms count")
    (is (= 3600
           (ffirst
            (d/q '[:find (count ?b)
                   :where [?b :block/path-refs ?bp] [?bp :block/name]] db)))
        "Correct referenced blocks count")
    (is (= 21
           (ffirst
            (d/q '[:find (count ?b)
                   :where [?b :block/content ?content]
                   [(clojure.string/includes? ?content "+BEGIN_QUERY")]]
                 db)))
        "Advanced query count"))
  (query-assertions db files))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.text
(ns logseq.graph-parser.text
(:require ["path" :as path]
[goog.string :as gstring]
[clojure.string :as string]
@@ -51,8 +51,6 @@
(def page-ref-re-without-nested #"\[\[([^\[\]]+)\]\]")
(defonce between-re #"\(between ([^\)]+)\)")
(defn page-ref-un-brackets!
[s]
(or (get-page-name s) s))
@@ -122,18 +120,6 @@
[s]
(string/split s #"(\"[^\"]*\")"))
(def bilibili-regex #"^((?:https?:)?//)?((?:www).)?((?:bilibili.com))(/(?:video/)?)([\w-]+)(\S+)?$")
(def loom-regex #"^((?:https?:)?//)?((?:www).)?((?:loom.com))(/(?:share/|embed/))([\w-]+)(\S+)?$")
(def vimeo-regex #"^((?:https?:)?//)?((?:www).)?((?:player.vimeo.com|vimeo.com))(/(?:video/)?)([\w-]+)(\S+)?$")
(def youtube-regex #"^((?:https?:)?//)?((?:www|m).)?((?:youtube.com|youtu.be|y2u.be|youtube-nocookie.com))(/(?:[\w-]+\?v=|embed/|v/)?)([\w-]+)([\S^\?]+)?$")
(defn get-matched-video
[url]
(or (re-find youtube-regex url)
(re-find loom-regex url)
(re-find vimeo-regex url)
(re-find bilibili-regex url)))
(def markdown-link #"\[([^\[]+)\](\(.*\))")
(defn split-page-refs-without-brackets
@@ -228,16 +214,6 @@
:else
(remove-level-space-aux! text block-pattern space? trim-left?)))))
(defn build-data-value
[col]
(let [items (map (fn [item] (str "\"" item "\"")) col)]
(gstring/format "[%s]"
(string/join ", " items))))
(defn media-link?
[media-formats s]
(some (fn [fmt] (gp-util/safe-re-find (re-pattern (str "(?i)\\." fmt "(?:\\?([^#]*))?(?:#(.*))?$")) s)) media-formats))
(defn namespace-page?
[p]
(and (string? p)
@@ -246,100 +222,6 @@
(not (string/starts-with? p "./"))
(not (gp-util/url? p))))
(defn add-timestamp
[content key value]
(let [new-line (str (string/upper-case key) ": " value)
lines (string/split-lines content)
new-lines (map (fn [line]
(string/trim
(if (string/starts-with? (string/lower-case line) key)
new-line
line)))
lines)
new-lines (if (not= (map string/trim lines) new-lines)
new-lines
(cons (first new-lines) ;; title
(cons
new-line
(rest new-lines))))]
(string/join "\n" new-lines)))
(defn remove-timestamp
[content key]
(let [lines (string/split-lines content)
new-lines (filter (fn [line]
(not (string/starts-with? (string/lower-case line) key)))
lines)]
(string/join "\n" new-lines)))
(defn get-current-line-by-pos
[s pos]
(let [lines (string/split-lines s)
result (reduce (fn [acc line]
(let [new-pos (+ acc (count line))]
(if (>= new-pos pos)
(reduced line)
(inc new-pos)))) 0 lines)]
(when (string? result)
result)))
(defn get-string-all-indexes
"Get all indexes of `value` in the string `s`."
[s value]
(loop [acc []
i 0]
(if-let [i (string/index-of s value i)]
(recur (conj acc i) (+ i (count value)))
acc)))
(defn surround-by?
"`pos` must be surrounded by `before` and `and` in string `value`, e.g. ((|))"
[value pos before end]
(let [start-pos (if (= :start before) 0 (- pos (count before)))
end-pos (if (= :end end) (count value) (+ pos (count end)))]
(when (>= (count value) end-pos)
(= (cond
(and (= :end end) (= :start before))
""
(= :end end)
before
(= :start before)
end
:else
(str before end))
(subs value start-pos end-pos)))))
(defn wrapped-by?
"`pos` must be wrapped by `before` and `and` in string `value`, e.g. ((a|b))"
[value pos before end]
(let [before-matches (->> (get-string-all-indexes value before)
(map (fn [i] [i :before])))
end-matches (->> (get-string-all-indexes value end)
(map (fn [i] [i :end])))
indexes (sort-by first (concat before-matches end-matches [[pos :between]]))
ks (map second indexes)
q [:before :between :end]]
(true?
(reduce (fn [acc k]
(if (= q (conj acc k))
(reduced true)
(vec (take-last 2 (conj acc k)))))
[]
ks))))
(defn get-graph-name-from-path
[path]
(when (string? path)
(let [parts (->> (string/split path #"/")
(take-last 2))]
(-> (if (not= (first parts) "0")
(string/join "/" parts)
(last parts))
js/decodeURI))))
(defonce non-parsing-properties
(atom #{"background-color" "background_color"}))

View File

@@ -1,5 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.utf8
(:require [goog.object :as gobj]))
(ns logseq.graph-parser.utf8)
(defonce encoder
(js/TextEncoder. "utf-8"))
@@ -22,7 +21,3 @@
(if end
(decode (.subarray arr start end))
(decode (.subarray arr start)))))
(defn length
[arr]
(gobj/get arr "length"))

View File

@@ -1,4 +1,4 @@
(ns ^:nbb-compatible logseq.graph-parser.util
(ns logseq.graph-parser.util
"Util fns shared between graph-parser and rest of app. Util fns only rely on
clojure standard libraries."
(:require [clojure.walk :as walk]
@@ -7,9 +7,6 @@
(defn safe-re-find
"Copy of frontend.util/safe-re-find. Too basic to couple to main app"
[pattern s]
(when-not (string? s)
;; TODO: sentry
(js/console.trace))
(when (string? s)
(re-find pattern s)))
@@ -65,7 +62,7 @@
(try
(js/URL. s)
true
(catch js/Error _e
(catch :default _e
false))))
(defn json->clj

View File

@@ -0,0 +1,13 @@
(ns logseq.graph-parser.cli-test
(:require [cljs.test :refer [deftest]]
[logseq.graph-parser.cli :as gp-cli]
[logseq.graph-parser.test.docs-graph-helper :as docs-graph-helper]))
;; Integration test that test parsing a large graph like docs
(deftest ^:integration parse-graph
(let [graph-dir "test/docs"
_ (docs-graph-helper/clone-docs-repo-if-not-exists graph-dir)
{:keys [conn files]} (gp-cli/parse-graph graph-dir)
db @conn]
(docs-graph-helper/docs-graph-assertions db files)))

View File

@@ -1,12 +1,11 @@
(ns logseq.graph-parser.extract-test
(:require [cljs.test :refer [async deftest is]]
(:require [cljs.test :refer [deftest is]]
[logseq.graph-parser.extract :as extract]
[clojure.pprint :as pprint]
[promesa.core :as p]))
[clojure.pprint :as pprint]))
(defn- extract
[text]
(p/let [result (extract/extract-blocks-pages "a.md" text {:block-pattern "-"})
(let [result (extract/extract-blocks-pages "a.md" text {:block-pattern "-"})
result (last result)
lefts (map (juxt :block/parent :block/left) result)]
(if (not= (count lefts) (count (distinct lefts)))
@@ -15,34 +14,24 @@
(throw (js/Error. ":block/parent && :block/left conflicts")))
(mapv :block/content result))))
(defn- async-test
[x y]
(async done
(p/then
(extract x)
(fn [v]
(is (= y v))
(done)))))
(deftest test-extract-blocks-pages
[]
(async-test
"- a
(is (= ["a" "b" "c"]
(extract
"- a
- b
- c"
["a" "b" "c"])
- c")))
(async-test
"## hello
(is (= ["## hello" "world" "nice" "nice" "bingo" "world"]
(extract "## hello
- world
- nice
- nice
- bingo
- world"
["## hello" "world" "nice" "nice" "bingo" "world"])
- world")))
(async-test
"# a
(is (= ["# a" "## b" "### c" "#### d" "### e" "f" "g" "h" "i" "j"]
(extract "# a
## b
### c
#### d
@@ -51,17 +40,13 @@
- g
- h
- i
- j"
["# a" "## b" "### c" "#### d" "### e" "f" "g" "h" "i" "j"]))
- j"))))
(deftest test-regression-1902
[]
(async-test
"- line1
(is (= ["line1" "line2" "line3" "line4"]
(extract
"- line1
- line2
- line3
- line4"
["line1" "line2" "line3" "line4"]))
#_(cljs.test/run-tests)
- line4"))))

View File

@@ -1,7 +1,8 @@
(ns logseq.graph-parser.mldoc-test
(:require [logseq.graph-parser.mldoc :as gp-mldoc]
[clojure.string :as string]
[frontend.test.docs-graph-helper :as docs-graph-helper]
[logseq.graph-parser.test.docs-graph-helper :as docs-graph-helper]
[logseq.graph-parser.cli :as gp-cli]
[cljs.test :refer [testing deftest are is]]))
(deftest test-link
@@ -98,9 +99,9 @@
: definition" md-config {})))))
(deftest ^:integration test->edn
(let [graph-dir "src/test/docs"
(let [graph-dir "test/docs"
_ (docs-graph-helper/clone-docs-repo-if-not-exists graph-dir)
files (docs-graph-helper/build-graph-files graph-dir)
files (gp-cli/build-graph-files graph-dir)
asts-by-file (->> files
(map (fn [{:file/keys [path content]}]
(let [format (if (string/ends-with? path ".org")
@@ -116,10 +117,10 @@
"Drawer" 1,
"Example" 20,
"Footnote_Definition" 2,
"Heading" 3493,
"Heading" 3496,
"Hiccup" 15,
"List" 36,
"Paragraph" 411,
"List" 37,
"Paragraph" 417,
"Properties" 104,
"Property_Drawer" 188,
"Quote" 9,

View File

@@ -1,13 +1,12 @@
(ns logseq.graph-parser.nbb-test-runner
"Nbb tests for graph-parser"
(:require [cljs.test :as t]
[logseq.graph-parser.mldoc :as gp-mldoc]
[logseq.graph-parser.text :as text]
[logseq.graph-parser.text-test]
[logseq.graph-parser.mldoc-test]
[logseq.graph-parser.block-test]
[logseq.graph-parser.property-test]
[logseq.graph-parser.extract-test]))
[logseq.graph-parser.extract-test]
[logseq.graph-parser.cli-test]))
(defmethod cljs.test/report [:cljs.test/default :end-run-tests] [m]
(when-not (cljs.test/successful? m)
@@ -15,12 +14,9 @@
;; run this function with: nbb-logseq -m logseq.test.nbb-test-runner/run-tests
(defn run-tests []
;; This hack is the same as the one in frontend.format. This has to be in an nbb only
;; ns since alter-var-root doesn't exist in cljs and nbb doesn't support set! yet
#_:clj-kondo/ignore
(alter-var-root #'gp-mldoc/parse-property (constantly text/parse-property))
(t/run-tests 'logseq.graph-parser.mldoc-test
'logseq.graph-parser.text-test
'logseq.graph-parser.property-test
'logseq.graph-parser.block-test
'logseq.graph-parser.extract-test))
'logseq.graph-parser.extract-test
'logseq.graph-parser.cli-test))

View File

@@ -98,36 +98,6 @@
"**foobar" "foobar"
"*********************foobar" "foobar")))
(deftest test-add-timestamp
[]
(are [x y] (= x y)
(text/add-timestamp "LATER hello world\nhello"
"scheduled"
"<2021-08-25 Wed>")
"LATER hello world\nSCHEDULED: <2021-08-25 Wed>\nhello"
(text/add-timestamp "LATER hello world "
"scheduled"
"<2021-08-25 Wed>")
"LATER hello world\nSCHEDULED: <2021-08-25 Wed>"
(text/add-timestamp "LATER hello world\nfoo:: bar\ntest"
"scheduled"
"<2021-08-25 Wed>")
"LATER hello world\nSCHEDULED: <2021-08-25 Wed>\nfoo:: bar\ntest"))
(deftest get-string-all-indexes
[]
(are [x y] (= x y)
(text/get-string-all-indexes "[[hello]] [[world]]" "[[")
[0 10]
(text/get-string-all-indexes "abc abc ab" "ab")
[0 4 8]
(text/get-string-all-indexes "a.c a.c ab" "a.")
[0 4]))
(deftest test-parse-property
(testing "parse-property"
(are [k v y] (= (text/parse-property k v {}) y)

411
deps/graph-parser/yarn.lock vendored Normal file
View File

@@ -0,0 +1,411 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@logseq/nbb-logseq@^0.5.103":
version "0.5.103"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-0.5.103.tgz#1084380cd54c92ca8cc94a8934cc777206e45cc0"
integrity sha512-V9UW0XrCaaadHUc6/Hp9wfGpQqkzqzoqnDGeSVZkWR6l3QwyqGi9mkhnhVcfTwAvxIfOgrfz93GcaeepV4pYNA==
dependencies:
import-meta-resolve "^1.1.1"
ansi-regex@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==
ansi-regex@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1"
integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==
builtins@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/builtins/-/builtins-4.1.0.tgz#1edd016dd91ce771a1ed6fc3b2b71fb918953250"
integrity sha512-1bPRZQtmKaO6h7qV1YHXNtr6nCK28k0Zo95KM4dXfILcZZwoHJBN1m3lfLv9LPkcOZlrSr+J1bzMaZFO98Yq0w==
dependencies:
semver "^7.0.0"
camelcase@^5.0.0:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
cliui@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49"
integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==
dependencies:
string-width "^2.1.1"
strip-ansi "^4.0.0"
wrap-ansi "^2.0.0"
code-point-at@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
dependencies:
nice-try "^1.0.4"
path-key "^2.0.1"
semver "^5.5.0"
shebang-command "^1.2.0"
which "^1.2.9"
decamelize@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
end-of-stream@^1.1.0:
version "1.4.4"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
dependencies:
once "^1.4.0"
execa@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
dependencies:
cross-spawn "^6.0.0"
get-stream "^4.0.0"
is-stream "^1.1.0"
npm-run-path "^2.0.0"
p-finally "^1.0.0"
signal-exit "^3.0.0"
strip-eof "^1.0.0"
find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
dependencies:
locate-path "^3.0.0"
get-caller-file@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a"
integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==
get-stream@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
dependencies:
pump "^3.0.0"
import-meta-resolve@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/import-meta-resolve/-/import-meta-resolve-1.1.1.tgz#244fd542fd1fae73550d4f8b3cde3bba1d7b2b18"
integrity sha512-JiTuIvVyPaUg11eTrNDx5bgQ/yMKMZffc7YSjvQeSMXy58DO2SQ8BtAf3xteZvmzvjYh14wnqNjL8XVeDy2o9A==
dependencies:
builtins "^4.0.0"
invert-kv@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02"
integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==
is-fullwidth-code-point@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs=
dependencies:
number-is-nan "^1.0.0"
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
lcid@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf"
integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==
dependencies:
invert-kv "^2.0.0"
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
dependencies:
p-locate "^3.0.0"
path-exists "^3.0.0"
lru-cache@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
dependencies:
yallist "^4.0.0"
map-age-cleaner@^0.1.1:
version "0.1.3"
resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a"
integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==
dependencies:
p-defer "^1.0.0"
mem@^4.0.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178"
integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==
dependencies:
map-age-cleaner "^0.1.1"
mimic-fn "^2.0.0"
p-is-promise "^2.0.0"
mimic-fn@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
mldoc@^1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/mldoc/-/mldoc-1.3.3.tgz#b7f39b48eb0ef3558619d3e3522265977bd78fe3"
integrity sha512-TzW06GBltdKxwWAxOvflPmIVedu6bzl9T4YoYqnDUyZ3kELFMllEgiYCh65PPW3xsRMA/5OcRQqqGZGiKEJEug==
dependencies:
yargs "^12.0.2"
nice-try@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
npm-run-path@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
dependencies:
path-key "^2.0.0"
number-is-nan@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
os-locale@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==
dependencies:
execa "^1.0.0"
lcid "^2.0.0"
mem "^4.0.0"
p-defer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c"
integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
p-is-promise@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e"
integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==
p-limit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
dependencies:
p-try "^2.0.0"
p-locate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
dependencies:
p-limit "^2.0.0"
p-try@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
path-key@^2.0.0, path-key@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
require-directory@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
require-main-filename@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1"
integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=
semver@^5.5.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
semver@^7.0.0:
version "7.3.7"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f"
integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==
dependencies:
lru-cache "^6.0.0"
set-blocking@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
dependencies:
shebang-regex "^1.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
signal-exit@^3.0.0:
version "3.0.7"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
string-width@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=
dependencies:
code-point-at "^1.0.0"
is-fullwidth-code-point "^1.0.0"
strip-ansi "^3.0.0"
string-width@^2.0.0, string-width@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
dependencies:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^4.0.0"
strip-ansi@^3.0.0, strip-ansi@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
dependencies:
ansi-regex "^2.0.0"
strip-ansi@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
dependencies:
ansi-regex "^3.0.0"
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
dependencies:
isexe "^2.0.0"
wrap-ansi@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85"
integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=
dependencies:
string-width "^1.0.1"
strip-ansi "^3.0.1"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
"y18n@^3.2.1 || ^4.0.0":
version "4.0.3"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf"
integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==
yallist@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
yargs-parser@^11.1.1:
version "11.1.1"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4"
integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs@^12.0.2:
version "12.0.5"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13"
integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==
dependencies:
cliui "^4.0.0"
decamelize "^1.2.0"
find-up "^3.0.0"
get-caller-file "^1.0.1"
os-locale "^3.0.0"
require-directory "^2.1.1"
require-main-filename "^1.0.1"
set-blocking "^2.0.0"
string-width "^2.0.0"
which-module "^2.0.0"
y18n "^3.2.1 || ^4.0.0"
yargs-parser "^11.1.1"

View File

@@ -62,7 +62,7 @@ scripts/lint_rules.clj
Namespaces have the metadata flag `^:nbb-compatible` indicate they are compatible with https://github.com/logseq/nbb-logseq. This compatibility is necessary in order for namespaces to be reused by the frontend and CLIs. To confirm these compatibilities, run:
```
bb test:load-nbb-compatible-namespaces
bb test:load-namespaces-with-nbb
```
## Testing
@@ -135,3 +135,9 @@ Specs should go under `src/main/frontend/spec/` and be compatible with clojure
and clojurescript. See `frontend.spec.storage` for an example. By following
these conventions, specs should also be usable by babashka. This is helpful as it
allows for third party tools to be written with logseq's data model.
## Development Tools
There are some babashka tasks under `nbb:` which are useful for inspecting
database changes in realtime. See [these
docs](https://github.com/logseq/bb-tasks#logseqbb-tasksnbbwatch) for more info.

View File

@@ -7,7 +7,7 @@
(require '[babashka.pods :as pods])
(pods/load-pod 'clj-kondo/clj-kondo "2021.12.19")
(pods/load-pod 'clj-kondo/clj-kondo "2022.02.09")
(require '[pod.borkdude.clj-kondo :as clj-kondo])
;; define clj-kondo.core ns which is used by carve
(intern (create-ns 'clj-kondo.core) 'run! clj-kondo/run!)

View File

@@ -5,12 +5,13 @@
the team to maintain and understand them."
(:require [babashka.pods :as pods]
[clojure.pprint :as pprint]
[clojure.edn :as edn]
[clojure.set :as set]))
(pods/load-pod 'clj-kondo/clj-kondo "2021.12.19")
(pods/load-pod 'clj-kondo/clj-kondo "2022.02.09")
(require '[pod.borkdude.clj-kondo :as clj-kondo])
(def config
(def default-config
;; TODO: Discuss with team and agree on lower number
{:max-lines-count 100
;; Vars with these metadata flags are allowed. Name should indicate the reason
@@ -23,11 +24,14 @@
(defn -main
[args]
(let [paths (or args ["src"])
(let [paths [(or (first args) "src")]
config (or (some->> (second args) edn/read-string (merge default-config))
default-config)
{{:keys [var-definitions]} :analysis}
(clj-kondo/run!
{:lint paths
:config {:output {:analysis {:var-definitions {:meta true}}}}})
:config {:output {:analysis {:var-definitions {:meta true
:lang :cljs}}}}})
vars (->> var-definitions
(keep (fn [m]
(let [lines-count (inc (- (:end-row m) (:row m)))]
@@ -40,7 +44,7 @@
(sort-by :lines-count (fn [x y] (compare y x))))]
(if (seq vars)
(do
(println (format "The following vars exceed the line count max of %s:"
(println (format "\nThe following vars exceed the line count max of %s:"
(:max-lines-count config)))
(pprint/print-table vars)
(System/exit 1))

View File

@@ -1,15 +1,16 @@
(ns logseq.tasks.nbb
(:require [pod.borkdude.clj-kondo :as clj-kondo]
[clojure.string :as str]
[babashka.tasks :refer [shell]]))
(defn- fetch-meta-namespaces
"Return namespaces with metadata"
[paths]
(let [paths (or (seq paths) ["src"])
{{:keys [namespace-definitions]} :analysis}
(let [{{:keys [namespace-definitions]} :analysis}
(clj-kondo/run!
{:lint paths
:config {:output {:analysis {:namespace-definitions {:meta true}}}}})
:config {:output {:analysis {:namespace-definitions {:meta true
:lang :cljs}}}}})
matches (keep (fn [m]
(when (:meta m)
{:ns (:name m)
@@ -17,14 +18,30 @@
namespace-definitions)]
matches))
(defn- validate-namespaces
[namespaces classpath dir]
(assert (seq namespaces) "There must be some namespaces to check")
;; distinct b/c sometimes namespaces are duplicated with .cljc analysis
(doseq [n (distinct namespaces)]
(println "Requiring" n "...")
(shell {:dir dir} "yarn nbb-logseq -cp" classpath "-e" (format "(require '[%s])" n)))
(println "Success!"))
(defn load-compatible-namespaces
"Check nbb-compatible namespaces can be required by nbb-logseq"
[]
(let [namespaces (map :ns
(filter #(get-in % [:meta :nbb-compatible])
(fetch-meta-namespaces ["src/main"])))]
(assert (seq namespaces) "There must be some nbb namespaces to check")
(doseq [n namespaces]
(println "Requiring" n "...")
(shell "yarn nbb-logseq -cp src/main -e" (format "(require '[%s])" n)))
(println "Success!")))
(validate-namespaces namespaces "src/main" ".")))
(defn load-all-namespaces
"Check all namespaces in source path(s) can be required by nbb-logseq"
[dir & paths]
(let [{{:keys [namespace-definitions]} :analysis}
(clj-kondo/run!
{:lint (map #(str dir "/" %) paths)
:config {:output {:analysis {:namespace-definitions {:lang :cljs}}}}})]
(validate-namespaces (map :name namespace-definitions)
(str/join ":" paths)
dir)))

View File

@@ -53,6 +53,7 @@
[frontend.util :as util]
[frontend.util.clock :as clock]
[frontend.util.drawer :as drawer]
[frontend.util.text :as text-util]
[frontend.util.property :as property]
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser.util :as gp-util]
@@ -280,7 +281,7 @@
(contains? config/audio-formats ext)
(audio-cp @src)
(contains? (config/img-formats) ext)
(contains? (gp-config/img-formats) ext)
(resizable-image config title @src metadata full_text true)
(= ext :pdf)
@@ -825,7 +826,7 @@
(nil? metadata-show)
(or
(gp-config/local-asset? s)
(text/media-link? media-formats s)))
(text-util/media-link? media-formats s)))
(true? (boolean metadata-show))))
;; markdown
@@ -834,7 +835,7 @@
;; image http link
(and (or (string/starts-with? full-text "http://")
(string/starts-with? full-text "https://"))
(text/media-link? media-formats s)))))
(text-util/media-link? media-formats s)))))
(defn- relative-assets-path->absolute-path
[path]
@@ -1114,7 +1115,7 @@
(defn- macro-vimeo-cp
[_config arguments]
(when-let [url (first arguments)]
(when-let [vimeo-id (nth (util/safe-re-find text/vimeo-regex url) 5)]
(when-let [vimeo-id (nth (util/safe-re-find text-util/vimeo-regex url) 5)]
(when-not (string/blank? vimeo-id)
(let [width (min (- (util/get-width) 96)
560)
@@ -1134,7 +1135,7 @@
(when-let [id (cond
(<= (count url) 15) url
:else
(nth (util/safe-re-find text/bilibili-regex url) 5))]
(nth (util/safe-re-find text-util/bilibili-regex url) 5))]
(when-not (string/blank? id)
(let [width (min (- (util/get-width) 96)
560)
@@ -1155,7 +1156,7 @@
(let [width (min (- (util/get-width) 96)
560)
height (int (* width (/ 315 560)))
results (text/get-matched-video url)
results (text-util/get-matched-video url)
src (match results
[_ _ _ (:or "youtube.com" "youtu.be" "y2u.be") _ id _]
(if (= (count id) 11) ["youtube-player" id] url)
@@ -1303,7 +1304,7 @@
(when-let [youtube-id (cond
(== 11 (count url)) url
:else
(nth (util/safe-re-find text/youtube-regex url) 5))]
(nth (util/safe-re-find text-util/youtube-regex url) 5))]
(when-not (string/blank? youtube-id)
(youtube/youtube-video youtube-id))))
@@ -2023,7 +2024,7 @@
(util/clear-selection!)))}
(not slide?)
(merge attrs))
[:<>
[:div.flex.flex-row.justify-between
[:div.flex-1
@@ -2345,7 +2346,7 @@
(let [refs (model/get-page-names-by-ids
(->> (map :db/id refs)
(remove nil?)))]
(text/build-data-value refs)))
(text-util/build-data-value refs)))
(defn- get-children-refs
[children]
@@ -2466,14 +2467,14 @@
(block-handler/on-touch-move event block uuid *show-left-menu? *show-right-menu?))
:on-touch-end (fn [event]
(block-handler/on-touch-end event block uuid *show-left-menu? *show-right-menu?))
:on-touch-cancel block-handler/on-touch-cancel
:on-touch-cancel block-handler/on-touch-cancel
:on-mouse-over (fn [e]
(block-mouse-over uuid e *control-show? block-id doc-mode?))
:on-mouse-leave (fn [e]
(block-mouse-leave e *control-show? block-id doc-mode?))}
(when (not slide?)
(block-control config block uuid block-id collapsed? *control-show? edit?))
(when @*show-left-menu?
(block-left-menu config block))
(block-content-or-editor config block edit-input-id block-id heading-level edit?)
@@ -2485,7 +2486,7 @@
(dnd-separator-wrapper block block-id slide? false false)]))
(rum/defcs block-container < rum/reactive
(rum/local false ::show-block-left-menu?)
(rum/local false ::show-block-left-menu?)
(rum/local false ::show-block-right-menu?)
{:init (fn [state]
(let [[config block] (:rum/args state)

View File

@@ -5,7 +5,6 @@
[datascript.core :as d]
[frontend.components.lazy-editor :as lazy-editor]
[frontend.components.svg :as svg]
[frontend.config :as config]
[frontend.context.i18n :refer [t]]
[frontend.date :as date]
[frontend.db :as db]
@@ -100,10 +99,10 @@
(cond
;; image type
(and format (contains? (config/img-formats) format))
(and format (contains? (gp-config/img-formats) format))
[:img {:src path}]
(and format (contains? (config/text-formats) format))
(and format (contains? (gp-config/text-formats) format))
(when-let [file-content (db/get-file path)]
(let [content (string/trim file-content)
mode (util/get-file-ext path)]

View File

@@ -8,10 +8,10 @@
[frontend.db.model :as model]
[frontend.handler.page :as page-handler]
[frontend.state :as state]
[logseq.graph-parser.text :as text]
[logseq.graph-parser.util :as gp-util]
[frontend.ui :as ui]
[frontend.util :as util]
[frontend.util.text :as text-util]
[goog.object :as gobj]
[reitit.frontend.easy :as rfe]
[rum.core :as rum]))
@@ -32,7 +32,7 @@
page-entity (db/pull [:block/name (util/page-name-sanity-lc title)])
data-page-tags (when (seq (:block/tags page-entity))
(let [page-names (model/get-page-names-by-ids (map :db/id (:block/tags page)))]
(text/build-data-value page-names)))]
(text-util/build-data-value page-names)))]
[:div.flex-1.journal.page (cond-> {}
data-page-tags
(assoc :data-page-tags data-page-tags))

View File

@@ -25,10 +25,10 @@
[frontend.handler.route :as route-handler]
[frontend.mixins :as mixins]
[frontend.state :as state]
[logseq.graph-parser.text :as text]
[frontend.search :as search]
[frontend.ui :as ui]
[frontend.util :as util]
[frontend.util.text :as text-util]
[goog.object :as gobj]
[reitit.frontend.easy :as rfe]
[medley.core :as medley]
@@ -346,7 +346,7 @@
[:div.flex-1.page.relative
(merge (if (seq (:block/tags page))
(let [page-names (model/get-page-names-by-ids (map :db/id (:block/tags page)))]
{:data-page-tags (text/build-data-value page-names)})
{:data-page-tags (text-util/build-data-value page-names)})
{})
{:key path-page-name

View File

@@ -14,7 +14,7 @@
[reitit.frontend.easy :as rfe]
[rum.core :as rum]
[frontend.mobile.util :as mobile-util]
[logseq.graph-parser.text :as text]
[frontend.util.text :as text-util]
[promesa.core :as p]
[electron.ipc :as ipc]
[goog.object :as gobj]
@@ -54,7 +54,7 @@
[:div.flex.justify-between.mb-4 {:key id}
(if local?
(let [local-dir (config/get-local-dir url)
graph-name (text/get-graph-name-from-path local-dir)]
graph-name (text-util/get-graph-name-from-path local-dir)]
[:a {:title local-dir
:on-click #(state/pub-event! [:graph/switch url])}
graph-name])
@@ -89,7 +89,7 @@
repo-links (mapv
(fn [{:keys [url]}]
(let [repo-path (db/get-repo-name url)
short-repo-name (text/get-graph-name-from-path repo-path)]
short-repo-name (text-util/get-graph-name-from-path repo-path)]
{:title short-repo-name
:hover-detail repo-path ;; show full path on hover
:options {:class "ml-1"

View File

@@ -9,8 +9,8 @@
[frontend.state :as state]
[frontend.ui :as ui]
[frontend.util :as util]
[frontend.util.text :as text-util]
[frontend.db :as db]
[logseq.graph-parser.text :as text]
[rum.core :as rum]
[frontend.config :as config]
[frontend.handler.repo :as repo-handler]
@@ -76,7 +76,7 @@
(or (config/demo-graph? url)
(= url (state/get-current-repo)))))
(map (fn [{:keys [url]}]
{:value (text/get-graph-name-from-path
{:value (text-util/get-graph-name-from-path
;; TODO: Use helper when a common one is refactored
;; from components.repo
(if (config/local-db? url)
@@ -99,7 +99,7 @@
(remove (fn [{:keys [url]}]
(config/demo-graph? url)))
(map (fn [{:keys [url] :as original-graph}]
{:value (text/get-graph-name-from-path
{:value (text-util/get-graph-name-from-path
;; TODO: Use helper when a common one is refactored
;; from components.repo
(if (config/local-db? url)

View File

@@ -5,6 +5,7 @@
[frontend.util :as util]
[shadow.resource :as rc]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.config :as gp-config]
[frontend.mobile.util :as mobile-util]))
(goog-define DEV-RELEASE false)
@@ -63,49 +64,20 @@
(if dev? path
(str asset-domain path))))
(defn text-formats
[]
(let [config-formats (some->> (get-in @state/state [:config :text-formats])
(map :keyword)
(set))]
(set/union
config-formats
#{:json :org :md :yml :dat :asciidoc :rst :txt :markdown :adoc :html :js :ts :edn :clj :ml :rb :ex :erl :java :php :c :css
:excalidraw})))
(def markup-formats
#{:org :md :markdown :asciidoc :adoc :rst})
(defn img-formats
[]
(let [config-formats (some->> (get-in @state/state [:config :image-formats])
(map :keyword)
(set))]
(set/union
config-formats
#{:gif :svg :jpeg :ico :png :jpg :bmp :webp})))
(defn doc-formats
[]
(let [config-formats (some->> (get-in @state/state [:config :document-formats])
(map :keyword)
(set))]
(set/union
config-formats
#{:doc :docx :xls :xlsx :ppt :pptx :one :pdf :epub})))
#{:doc :docx :xls :xlsx :ppt :pptx :one :pdf :epub})
(def audio-formats #{:mp3 :ogg :mpeg :wav :m4a :flac :wma :aac})
(def media-formats (set/union (img-formats) audio-formats))
(def media-formats (set/union (gp-config/img-formats) audio-formats))
(def html-render-formats
#{:adoc :asciidoc})
(defn supported-formats
[]
(set/union (text-formats)
(img-formats)))
(def mobile?
(when-not util/node-test?
(util/safe-re-find #"Mobi" js/navigator.userAgent)))
@@ -114,13 +86,7 @@
(defn get-block-pattern
[format]
(let [format (or format (state/get-preferred-format))
format (keyword format)]
(case format
:org
"*"
"-")))
(gp-config/get-block-pattern (or format (state/get-preferred-format))))
(defn get-hr
[format]

View File

@@ -1,9 +1,9 @@
(ns frontend.db
(:require [clojure.core.async :as async]
[datascript.core :as d]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.conn :as conn]
[frontend.db.default :as default-db]
[logseq.graph-parser.db.default :as default-db]
[frontend.db.model]
[frontend.db.query-custom]
[frontend.db.query-react]
@@ -27,7 +27,6 @@
get-short-repo-name
datascript-db
get-db
me-tx
remove-conn!]
[frontend.db.utils
@@ -39,28 +38,27 @@
[frontend.db.model
blocks-count blocks-count-cache clean-export! delete-blocks get-pre-block
delete-file! delete-file-blocks! delete-page-blocks delete-file-pages! delete-file-tx delete-files delete-pages-by-files
delete-file-blocks! delete-page-blocks delete-files delete-pages-by-files
filter-only-public-pages-and-blocks get-all-block-contents get-all-tagged-pages
get-all-templates get-block-and-children get-block-by-uuid get-block-children sort-by-left
get-block-parent get-block-parents parents-collapsed? get-block-referenced-blocks
get-block-children-ids get-block-immediate-children get-block-page
get-blocks-contents get-custom-css
get-date-scheduled-or-deadlines get-db-type
get-file-blocks get-file-contents get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-file-pages get-files get-files-blocks get-files-full get-journals-length
get-latest-journals get-matched-blocks get-page get-page-alias get-page-alias-names get-paginated-blocks get-page-linked-refs-refed-pages
get-custom-css get-date-scheduled-or-deadlines
get-file-blocks get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-files get-files-blocks get-files-full get-journals-length
get-latest-journals get-page get-page-alias get-page-alias-names get-paginated-blocks get-page-linked-refs-refed-pages
get-page-blocks-count get-page-blocks-no-cache get-page-file get-page-format get-page-properties
get-page-referenced-blocks get-page-referenced-pages get-page-unlinked-references get-page-referenced-blocks-no-cache
get-all-pages get-pages get-pages-relation get-pages-that-mentioned-page get-public-pages get-tag-pages
journal-page? page-alias-set pull-block
set-file-last-modified-at! transact-files-db! page-empty? page-exists? page-empty-or-dummy? get-alias-source-page
set-file-last-modified-at! page-empty? page-exists? page-empty-or-dummy? get-alias-source-page
set-file-content! has-children? get-namespace-pages get-all-namespace-relation get-pages-by-name-partition]
[frontend.db.react
get-current-page set-key-value
remove-key! remove-q! remove-query-component! add-q! add-query-component! clear-query-state!
clear-query-state-without-refs-and-embeds! kv q
query-state query-components query-entity-in-component remove-custom-query! set-new-result! sub-key-value refresh!]
query-state query-components remove-custom-query! set-new-result! sub-key-value refresh!]
[frontend.db.query-custom
custom-query]
@@ -68,7 +66,7 @@
[frontend.db.query-react
react-query custom-query-result-transform]
[frontend.db.default built-in-pages-names built-in-pages])
[logseq.graph-parser.db.default built-in-pages-names built-in-pages])
(defn get-schema-version [db]
(d/q
@@ -147,16 +145,16 @@
(repo-listen-to-tx! repo conn)))
(defn start-db-conn!
([me repo]
(start-db-conn! me repo {}))
([me repo option]
(conn/start! me repo
([repo]
(start-db-conn! repo {}))
([repo option]
(conn/start! repo
(assoc option
:listen-handler listen-and-persist!))))
(defn restore-graph!
"Restore db from serialized db cache, and swap into the current db status"
[repo me]
[repo]
(p/let [db-name (datascript-db repo)
db-conn (d/create-conn db-schema/schema)
_ (swap! conns assoc db-name db-conn)
@@ -166,9 +164,8 @@
(catch js/Error _e
(js/console.warn "Invalid graph cache")
(d/empty-db db-schema/schema)))
attached-db (d/db-with stored-db (concat
[(me-tx stored-db me)]
default-db/built-in-pages)) ;; TODO bug overriding uuids?
attached-db (d/db-with stored-db
default-db/built-in-pages) ;; TODO bug overriding uuids?
db (if (old-schema? attached-db)
(db-migrate/migrate attached-db)
attached-db)]
@@ -176,10 +173,10 @@
(d/transact! db-conn [{:schema/version db-schema/version}])))
(defn restore!
[{:keys [repos] :as me} _old-db-schema restore-config-handler]
[{:keys [repos]} _old-db-schema restore-config-handler]
(let [repo (or (state/get-current-repo) (:url (first repos)))]
(when repo
(p/let [_ (restore-graph! repo me)]
(p/let [_ (restore-graph! repo)]
(restore-config-handler repo)
(listen-and-persist! repo)))))

View File

@@ -1,15 +1,13 @@
(ns frontend.db.conn
"Contains db connections."
(:require [clojure.string :as string]
[frontend.db-schema :as db-schema]
[frontend.db.default :as default-db]
[frontend.util :as util]
[frontend.mobile.util :as mobile-util]
[frontend.state :as state]
[frontend.config :as config]
[frontend.util.text :as text-util]
[logseq.graph-parser.text :as text]
[logseq.graph-parser.util :as gp-util]
[datascript.core :as d]))
[logseq.graph-parser.db :as gp-db]))
(defonce conns (atom {}))
@@ -24,7 +22,7 @@
[repo]
(cond
(mobile-util/native-platform?)
(text/get-graph-name-from-path repo)
(text-util/get-graph-name-from-path repo)
(config/local-db? repo)
(config/get-local-dir repo)
@@ -68,30 +66,13 @@
[repo]
(swap! conns dissoc (datascript-db repo)))
(defn me-tx
[_db {:keys [name email avatar]}]
(gp-util/remove-nils {:me/name name
:me/email email
:me/avatar avatar}))
(defn start!
([me repo]
(start! me repo {}))
([me repo {:keys [db-type listen-handler]}]
([repo]
(start! repo {}))
([repo {:keys [listen-handler]}]
(let [db-name (datascript-db repo)
db-conn (d/create-conn db-schema/schema)]
db-conn (gp-db/start-conn)]
(swap! conns assoc db-name db-conn)
(d/transact! db-conn [(cond-> {:schema/version db-schema/version}
db-type
(assoc :db/type db-type))
{:block/name "card"
:block/original-name "card"
:block/uuid (d/squuid)}])
(when me
(d/transact! db-conn [(me-tx (d/db db-conn) me)]))
(d/transact! db-conn default-db/built-in-pages)
(when listen-handler
(listen-handler repo)))))

View File

@@ -8,7 +8,7 @@
[datascript.core :as d]
[frontend.config :as config]
[frontend.date :as date]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.conn :as conn]
[frontend.db.react :as react]
[frontend.db.utils :as db-utils]
@@ -16,7 +16,7 @@
[frontend.util :as util :refer [react]]
[logseq.graph-parser.util :as gp-util]
[frontend.db.rules :refer [rules]]
[frontend.db.default :as default-db]
[logseq.graph-parser.db.default :as default-db]
[frontend.util.drawer :as drawer]))
;; lazy loading
@@ -56,18 +56,6 @@
{:block/page [:db/id :block/name :block/original-name :block/journal-day]}
{:block/_parent ...}])
(defn transact-files-db!
([tx-data]
(db-utils/transact! (state/get-current-repo) tx-data))
([repo-url tx-data]
(when-not config/publishing?
(let [tx-data (->> (gp-util/remove-nils tx-data)
(remove nil?)
(map #(dissoc % :file/handle :file/type)))]
(when (seq tx-data)
(when-let [conn (conn/get-db repo-url false)]
(d/transact! conn (vec tx-data))))))))
(defn pull-block
[id]
(let [repo (state/get-current-repo)]
@@ -208,16 +196,6 @@
(conn/get-db repo-url) path)
db-utils/seq-flatten))
(defn get-file-pages
[repo-url path]
(-> (d/q '[:find ?page
:in $ ?path
:where
[?file :file/path ?path]
[?page :block/file ?file]]
(conn/get-db repo-url) path)
db-utils/seq-flatten))
(defn set-file-last-modified-at!
[repo path last-modified-at]
(when (and repo path last-modified-at)
@@ -240,19 +218,6 @@
(when-let [db (conn/get-db repo)]
(d/entity db [:file/path path]))))
(defn get-file-contents
[repo]
(when-let [db (conn/get-db repo)]
(->>
(d/q
'[:find ?path ?content
:where
[?file :file/path ?path]
[?file :file/content ?content]]
db)
(into {}))))
(defn get-files-full
[repo]
(when-let [db (conn/get-db repo)]
@@ -1305,43 +1270,11 @@
(get-block-referenced-blocks-ids id)
(get-page-referenced-blocks-ids page-name-or-block-uuid)))
(defn get-matched-blocks
[match-fn limit]
(when-let [repo (state/get-current-repo)]
(let [pred (fn [_db content]
(match-fn content))]
(->> (d/q
'[:find ?block
:in $ ?pred
:where
[?block :block/content ?content]
[(?pred $ ?content)]]
(conn/get-db)
pred)
(take limit)
db-utils/seq-flatten
(db-utils/pull-many '[:block/uuid
:block/content
:block/properties
:block/format
{:block/page [:block/name]}])))))
;; TODO: Does the result preserves the order of the arguments?
(defn get-blocks-contents
[repo block-uuids]
(let [db (conn/get-db repo)]
(db-utils/pull-many repo '[:block/content]
(mapv (fn [id] [:block/uuid id]) block-uuids))))
(defn journal-page?
"sanitized page-name only"
[page-name]
(:block/journal? (db-utils/entity [:block/name page-name])))
(defn get-db-type
[repo]
(db-utils/get-key-value repo :db/type))
(defn get-public-pages
[db]
(-> (d/q
@@ -1516,24 +1449,6 @@
block-eids (mapv :e datoms)]
(mapv (fn [eid] [:db.fn/retractEntity eid]) block-eids)))))))
(defn delete-file-pages!
[repo-url path]
(let [pages (get-file-pages repo-url path)]
(mapv (fn [eid] [:db.fn/retractEntity eid]) pages)))
(defn delete-file-tx
[repo-url file-path]
(->>
(concat
(delete-file-blocks! repo-url file-path)
(delete-file-pages! repo-url file-path)
[[:db.fn/retractEntity [:file/path file-path]]])
(remove nil?)))
(defn delete-file!
[repo-url file-path]
(db-utils/transact! repo-url (delete-file-tx repo-url file-path)))
(defn delete-pages-by-files
[files]
(let [pages (->> (mapv get-file-page files)

View File

@@ -14,6 +14,7 @@
[frontend.db.rules :as rules]
[frontend.template :as template]
[logseq.graph-parser.text :as text]
[frontend.util.text :as text-util]
[frontend.util :as util]))
@@ -443,17 +444,18 @@ Some bindings in this fn:
[s]
(some-> s
(string/replace text/page-ref-re "\"[[$1]]\"")
(string/replace text/between-re (fn [[_ x]]
(->> (string/split x #" ")
(remove string/blank?)
(map (fn [x]
(if (or (contains? #{"+" "-"} (first x))
(and (util/safe-re-find #"\d" (first x))
(some #(string/ends-with? x %) ["y" "m" "d" "h" "min"])))
(keyword (name x))
x)))
(string/join " ")
(util/format "(between %s)"))))))
(string/replace text-util/between-re
(fn [[_ x]]
(->> (string/split x #" ")
(remove string/blank?)
(map (fn [x]
(if (or (contains? #{"+" "-"} (first x))
(and (util/safe-re-find #"\d" (first x))
(some #(string/ends-with? x %) ["y" "m" "d" "h" "min"])))
(keyword (name x))
x)))
(string/join " ")
(util/format "(between %s)"))))))
(defn- add-bindings!
[form q]

View File

@@ -137,21 +137,6 @@
;; Reactive query
(defn query-entity-in-component
([id-or-lookup-ref]
(db-utils/entity (state/get-current-repo) id-or-lookup-ref))
([repo id-or-lookup-ref]
(let [k [:entity id-or-lookup-ref]
result-atom (:result (get @query-state k))]
(when-let [component *query-component*]
(add-query-component! k component))
(when-let [db (conn/get-db repo)]
(let [result (d/entity db id-or-lookup-ref)
result-atom (or result-atom (atom nil))]
(set! (.-state result-atom) result)
(add-q! k nil nil result-atom identity identity identity))))))
(defn get-query-cached-result
[k]
(:result (get @query-state k)))

View File

@@ -6,7 +6,7 @@
[cljs-bean.core :as bean]
[frontend.util :as util]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.text :as text]))
[frontend.util.text :as text-util]))
(defn diff
[s1 s2]
@@ -47,7 +47,7 @@
:else
(recur r1 t2 (inc i1) i2))))
current-line (text/get-current-line-by-pos markup pos)]
current-line (text-util/get-current-line-by-pos markup pos)]
(cond
(= (util/nth-safe markup pos)
(util/nth-safe markup (inc pos))

View File

@@ -3,14 +3,9 @@
[frontend.format.adoc :refer [->AdocMode]]
[frontend.format.protocol :as protocol]
[logseq.graph-parser.mldoc :as gp-mldoc]
[logseq.graph-parser.text :as text]
[logseq.graph-parser.util :as gp-util]
[clojure.string :as string]))
;; TODO: Properly fix this circular dependency:
;; mldoc/->edn > text/parse-property > mldoc/link? ->mldoc/inline->edn + mldoc/default-config
(set! gp-mldoc/parse-property text/parse-property)
(defonce mldoc-record (->MldocMode))
(defonce adoc-record (->AdocMode))

View File

@@ -6,6 +6,7 @@
[frontend.db :as db]
[frontend.format :as format]
[frontend.state :as state]
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser.property :as gp-property]
[logseq.graph-parser.mldoc :as gp-mldoc]))
@@ -15,7 +16,7 @@
(gp-block/extract-blocks blocks content with-id? format
{:user-config (state/get-config)
:block-pattern (config/get-block-pattern format)
:supported-formats (config/supported-formats)
:supported-formats (gp-config/supported-formats)
:db (db/get-db (state/get-current-repo))
:date-formatter (state/get-date-formatter)}))

View File

@@ -4,12 +4,12 @@
[frontend.db :as db]
[frontend.db.model :as model]
[frontend.handler.editor :as editor]
[logseq.graph-parser.extract :as extract]
[frontend.handler.file :as file-handler]
[frontend.handler.page :as page-handler]
[frontend.handler.repo :as repo-handler]
[frontend.handler.ui :as ui-handler]
[logseq.graph-parser.util :as gp-util]
[frontend.util.text :as text-util]
[lambdaisland.glogi :as log]
[electron.ipc :as ipc]
[promesa.core :as p]
@@ -21,7 +21,7 @@
(defn- set-missing-block-ids!
[content]
(when (string? content)
(doseq [block-id (extract/extract-all-block-refs content)]
(doseq [block-id (text-util/extract-all-block-refs content)]
(when-let [block (try
(model/get-block-by-uuid block-id)
(catch js/Error _e

View File

@@ -6,7 +6,7 @@
[frontend.config :as config]
[frontend.context.i18n :as i18n]
[frontend.db :as db]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.conn :as conn]
[frontend.db.react :as react]
[frontend.error :as error]

View File

@@ -13,7 +13,7 @@
[frontend.config :as config]
[frontend.date :as date]
[frontend.db :as db]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.model :as db-model]
[frontend.db.utils :as db-utils]
[frontend.diff :as diff]
@@ -48,6 +48,7 @@
[frontend.util.priority :as priority]
[frontend.util.property :as property]
[frontend.util.thingatpt :as thingatpt]
[frontend.util.text :as text-util]
[goog.dom :as gdom]
[goog.dom.classes :as gdom-classes]
[goog.object :as gobj]
@@ -815,7 +816,7 @@
tail-len (count value)
pos (max
(if original-content
(utf8/length (utf8/encode original-content))
(gobj/get (utf8/encode original-content) "length")
0)
0)]
(edit-block! block pos id
@@ -935,8 +936,8 @@
(when-let [block (db/pull [:block/uuid block-id])]
(let [{:block/keys [content]} block
content (or content (state/get-edit-content))
new-content (-> (text/remove-timestamp content key)
(text/add-timestamp key value))]
new-content (-> (text-util/remove-timestamp content key)
(text-util/add-timestamp key value))]
(when (not= content new-content)
(let [input-id (state/get-edit-input-id)]
(if (and input-id
@@ -1567,7 +1568,7 @@
(when input
(let [value (gobj/get input "value")
pos (cursor/pos input)]
(text/surround-by? value pos before end))))
(text-util/surround-by? value pos before end))))
(defn wrapped-by?
[input before end]
@@ -1575,7 +1576,7 @@
(let [value (gobj/get input "value")
pos (dec (cursor/pos input))]
(when (>= pos 0)
(text/wrapped-by? value pos before end)))))
(text-util/wrapped-by? value pos before end)))))
(defn get-matched-pages
"Return matched page names"
@@ -2877,7 +2878,7 @@
(defn wrap-macro-url
[url]
(cond
(boolean (text/get-matched-video url))
(boolean (text-util/get-matched-video url))
(util/format "{{video %s}}" url)
(string/includes? url "twitter.com")

View File

@@ -13,7 +13,7 @@
[frontend.config :as config]
[frontend.context.i18n :refer [t]]
[frontend.db :as db]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.encrypt :as encrypt]
[frontend.extensions.srs :as srs]
[frontend.fs :as fs]

View File

@@ -2,8 +2,6 @@
(:refer-clojure :exclude [load-file])
(:require ["/frontend/utils" :as utils]
[borkdude.rewrite-edn :as rewrite]
[cljs-time.coerce :as tc]
[cljs-time.core :as t]
[cljs.core.async.interop :refer [<p!]]
[clojure.core.async :as async]
[frontend.config :as config]
@@ -11,16 +9,15 @@
[frontend.fs :as fs]
[frontend.fs.nfs :as nfs]
[frontend.handler.common :as common-handler]
[logseq.graph-parser.extract :as extract]
[frontend.handler.ui :as ui-handler]
[frontend.state :as state]
[frontend.util :as util]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.config :as gp-config]
[lambdaisland.glogi :as log]
[promesa.core :as p]
[frontend.mobile.util :as mobile]
[clojure.set :as set]))
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser :as graph-parser]))
;; TODO: extract all git ops using a channel
@@ -49,11 +46,11 @@
(defn- only-text-formats
[files]
(keep-formats files (config/text-formats)))
(keep-formats files (gp-config/text-formats)))
(defn- only-image-formats
[files]
(keep-formats files (config/img-formats)))
(keep-formats files (gp-config/img-formats)))
(defn restore-config!
([repo-url project-changed-check?]
@@ -91,10 +88,25 @@
(when (not= file current-file)
current-file))))
(defn- get-delete-blocks [repo-url first-page file]
(let [delete-blocks (->
(concat
(db/delete-file-blocks! repo-url file)
(when first-page (db/delete-page-blocks repo-url (:block/name first-page))))
(distinct))]
(when-let [current-file (page-exists-in-another-file repo-url first-page file)]
(when (not= file current-file)
(let [error (str "Page already exists with another file: " current-file ", current file: " file)]
(state/pub-event! [:notification/show
{:content error
:status :error
:clear? false}]))))
delete-blocks))
(defn reset-file!
([repo-url file content]
(reset-file! repo-url file content {}))
([repo-url file content {:keys [new-graph? from-disk?]}]
([repo-url file content options]
(let [electron-local-repo? (and (util/electron?)
(config/local-db? repo-url))
file (cond
@@ -118,53 +130,18 @@
file)
file (gp-util/path-normalize file)
new? (nil? (db/entity [:file/path file]))]
(db/set-file-content! repo-url file content)
(let [format (gp-util/get-format file)
file-content [{:file/path file}]
tx (if (contains? gp-config/mldoc-support-formats format)
(let [[pages blocks]
(extract/extract-blocks-pages
file
content
{:user-config (state/get-config)
:date-formatter (state/get-date-formatter)
:page-name-order (state/page-name-order)
:block-pattern (config/get-block-pattern format)
:supported-formats (config/supported-formats)
:db (db/get-db (state/get-current-repo))})
first-page (first pages)
delete-blocks (->
(concat
(db/delete-file-blocks! repo-url file)
(when first-page (db/delete-page-blocks repo-url (:block/name first-page))))
(distinct))
_ (when-let [current-file (page-exists-in-another-file repo-url first-page file)]
(when (not= file current-file)
(let [error (str "Page already exists with another file: " current-file ", current file: " file)]
(state/pub-event! [:notification/show
{:content error
:status :error
:clear? false}]))))
block-ids (map (fn [block] {:block/uuid (:block/uuid block)}) blocks)
block-refs-ids (->> (mapcat :block/refs blocks)
(filter (fn [ref] (and (vector? ref)
(= :block/uuid (first ref)))))
(map (fn [ref] {:block/uuid (second ref)}))
(seq))
;; To prevent "unique constraint" on datascript
block-ids (set/union (set block-ids) (set block-refs-ids))
pages (extract/with-ref-pages pages blocks)
pages-index (map #(select-keys % [:block/name]) pages)]
;; does order matter?
(concat file-content pages-index delete-blocks pages block-ids blocks))
file-content)
tx (concat tx [(let [t (tc/to-long (t/now))] ;; TODO: use file system timestamp?
(cond->
{:file/path file}
new?
(assoc :file/created-at t)))])]
(db/transact! repo-url tx {:new-graph? new-graph?
:from-disk? from-disk?})))))
(graph-parser/parse-file
(db/get-db repo-url false)
file
content
(merge options
{:new? new?
:delete-blocks-fn (partial get-delete-blocks repo-url)
:extract-options {:user-config (state/get-config)
:date-formatter (state/get-date-formatter)
:page-name-order (state/page-name-order)
:block-pattern (config/get-block-pattern (gp-util/get-format file))
:supported-formats (gp-config/supported-formats)}})))))
;; TODO: Remove this function in favor of `alter-files`
(defn alter-file

View File

@@ -2,7 +2,7 @@
(:require [clojure.set :as set]
[clojure.string :as string]
[frontend.db :as db]
[frontend.db.default :as default-db]
[logseq.graph-parser.db.default :as default-db]
[frontend.state :as state]
[frontend.util :as util]))

View File

@@ -7,7 +7,7 @@
[frontend.config :as config]
[frontend.date :as date]
[frontend.db :as db]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.model :as model]
[frontend.db.utils :as db-utils]
[frontend.db.conn :as conn]

View File

@@ -22,9 +22,8 @@
[shadow.resource :as rc]
[frontend.db.persist :as db-persist]
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser :as graph-parser]
[electron.ipc :as ipc]
[clojure.set :as set]
[clojure.core.async :as async]
[frontend.encrypt :as encrypt]))
@@ -215,30 +214,19 @@
(defn- parse-files-and-create-default-files-inner!
[repo-url files delete-files delete-blocks file-paths db-encrypted? re-render? re-render-opts opts]
(let [support-files (filter
(fn [file]
(let [format (gp-util/get-format (:file/path file))]
(contains? (set/union #{:edn :css} gp-config/mldoc-support-formats) format)))
files)
support-files (sort-by :file/path support-files)
{journals true non-journals false} (group-by (fn [file] (string/includes? (:file/path file) "journals/")) support-files)
{built-in true others false} (group-by (fn [file]
(or (string/includes? (:file/path file) "contents.")
(string/includes? (:file/path file) ".edn")
(string/includes? (:file/path file) "custom.css"))) non-journals)
support-files' (concat (reverse journals) built-in others)
(let [supported-files (graph-parser/filter-files files)
new-graph? (:new-graph? opts)
delete-data (->> (concat delete-files delete-blocks)
(remove nil?))
chan (async/to-chan! support-files')
chan (async/to-chan! supported-files)
graph-added-chan (async/promise-chan)]
(when (seq delete-data) (db/transact! repo-url delete-data))
(state/set-current-repo! repo-url)
(state/set-parsing-state! {:total (count support-files')})
(state/set-parsing-state! {:total (count supported-files)})
;; Synchronous for tests for not breaking anything
(if util/node-test?
(do
(doseq [file support-files']
(doseq [file supported-files]
(state/set-parsing-state! (fn [m]
(assoc m :current-parsing-file (:file/path file))))
(parse-and-load-file! repo-url file new-graph?))
@@ -354,9 +342,9 @@
(delete-db-f)))))
(defn start-repo-db-if-not-exists!
[repo option]
[repo]
(state/set-current-repo! repo)
(db/start-db-conn! nil repo option))
(db/start-db-conn! repo))
(defn setup-local-repo-if-not-exists!
[]
@@ -364,7 +352,7 @@
(let [repo config/local-repo]
(p/do! (fs/mkdir-if-not-exists (str "/" repo))
(state/set-current-repo! repo)
(db/start-db-conn! nil repo)
(db/start-db-conn! repo)
(when-not config/publishing?
(let [dummy-notes (t :tutorial/dummy-notes)]
(create-dummy-notes-page repo dummy-notes)))
@@ -380,19 +368,16 @@
(js/setTimeout setup-local-repo-if-not-exists! 100)))
(defn restore-and-setup-repo!
"Restore the db of a graph from the persisted data, and setup.
Create a new conn, or replace the conn in state with a new one.
me: optional, identity data, can be retrieved from `(state/get-me)` or `nil`"
([repo]
(restore-and-setup-repo! repo (state/get-me)))
([repo me]
(p/let [_ (state/set-db-restoring! true)
_ (db/restore-graph! repo me)]
(file-handler/restore-config! repo false)
;; Don't have to unlisten the old listerner, as it will be destroyed with the conn
(db/listen-and-persist! repo)
(ui-handler/add-style-if-exists!)
(state/set-db-restoring! false))))
"Restore the db of a graph from the persisted data, and setup. Create a new
conn, or replace the conn in state with a new one."
[repo]
(p/let [_ (state/set-db-restoring! true)
_ (db/restore-graph! repo)]
(file-handler/restore-config! repo false)
;; Don't have to unlisten the old listerner, as it will be destroyed with the conn
(db/listen-and-persist! repo)
(ui-handler/add-style-if-exists!)
(state/set-db-restoring! false)))
(defn rebuild-index!
[url]

View File

@@ -174,7 +174,7 @@
(assoc file :file/content content))) markup-files))
(p/then (fn [result]
(let [files (map #(dissoc % :file/file) result)]
(repo-handler/start-repo-db-if-not-exists! repo {:db-type :local-native-fs})
(repo-handler/start-repo-db-if-not-exists! repo)
(async/go
(let [_finished? (async/<! (repo-handler/load-repo-to-db! repo
{:new-graph? true
@@ -321,7 +321,7 @@
(search/reset-indice! repo)
(db/remove-conn! repo)
(db/clear-query-state!)
(db/start-db-conn! (state/get-me) repo)
(db/start-db-conn! repo)
(p/let [_ (reload-dir! repo true)
_ (ok-handler)]
(state/set-nfs-refreshing! false))))

View File

@@ -1,4 +1,4 @@
(ns frontend.mobile.deeplink
(ns frontend.mobile.deeplink
(:require
[clojure.string :as string]
[frontend.config :as config]
@@ -9,7 +9,7 @@
[frontend.handler.user :as user-handler]
[frontend.mobile.intent :as intent]
[frontend.state :as state]
[logseq.graph-parser.text :as text]))
[frontend.util.text :as text-util]))
(def *link-to-another-graph (atom false))
@@ -19,7 +19,7 @@
pathname (.-pathname parsed-url)
search-params (.-searchParams parsed-url)
current-repo-url (state/get-current-repo)
get-graph-name-fn #(-> (text/get-graph-name-from-path %)
get-graph-name-fn #(-> (text-util/get-graph-name-from-path %)
(string/split "/")
last
string/lower-case)

View File

@@ -13,10 +13,10 @@
[frontend.mobile.util :as mobile-util]
[frontend.state :as state]
[frontend.util :as util]
[frontend.util.text :as text-util]
[lambdaisland.glogi :as log]
[logseq.graph-parser.config :as gp-config]
[logseq.graph-parser.mldoc :as gp-mldoc]
[logseq.graph-parser.text :as text]
[promesa.core :as p]))
(defn- handle-received-text [result]
@@ -34,7 +34,7 @@
(string/split url "\"\n"))
text (some-> text (string/replace #"^\"" ""))
url (and url
(cond (boolean (text/get-matched-video url))
(cond (boolean (text-util/get-matched-video url))
(util/format "{{video %s}}" url)
(and (string/includes? url "twitter.com")

View File

@@ -4,7 +4,7 @@
[datascript.impl.entity :as de]
[frontend.db :as db]
[frontend.db.model :as db-model]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.conn :as conn]
[frontend.db.outliner :as db-outliner]
[frontend.modules.outliner.datascript :as ds]

View File

@@ -2,7 +2,7 @@
(:require [frontend.state :as state]
[datascript.core :as d]
[frontend.db :as db]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[rum.core :as rum]
[frontend.handler.route :as route]
[frontend.page :as page]

View File

@@ -25,4 +25,3 @@
;; repo
(s/def :repos/url string?)
(s/def :repos/branch string?)

View File

@@ -13,6 +13,7 @@
[goog.object :as gobj]
[promesa.core :as p]
[rum.core :as rum]
[logseq.graph-parser.config :as gp-config]
[frontend.mobile.util :as mobile-util]))
(defonce ^:large-vars/data-var state
@@ -1007,15 +1008,7 @@
(defn get-date-formatter
[]
(or
(when-let [repo (get-current-repo)]
(or
(get-in @state [:config repo :journal/page-title-format])
;; for compatibility
(get-in @state [:config repo :date-formatter])))
;; TODO:
(get-in @state [:me :settings :date-formatter])
"MMM do, yyyy"))
(gp-config/get-date-formatter (get-config)))
(defn shortcuts []
(get-in @state [:config (get-current-repo) :shortcuts]))

View File

@@ -0,0 +1,126 @@
(ns frontend.util.text
(:require [clojure.string :as string]
[goog.string :as gstring]
[frontend.util :as util]))
;; Matches `(between start end)` clauses, capturing the inner text
(defonce between-re #"\(between ([^\)]+)\)")

;; URL patterns for embeddable video providers, used by `get-matched-video`
(def bilibili-regex #"^((?:https?:)?//)?((?:www).)?((?:bilibili.com))(/(?:video/)?)([\w-]+)(\S+)?$")
(def loom-regex #"^((?:https?:)?//)?((?:www).)?((?:loom.com))(/(?:share/|embed/))([\w-]+)(\S+)?$")
(def vimeo-regex #"^((?:https?:)?//)?((?:www).)?((?:player.vimeo.com|vimeo.com))(/(?:video/)?)([\w-]+)(\S+)?$")
(def youtube-regex #"^((?:https?:)?//)?((?:www|m).)?((?:youtube.com|youtu.be|y2u.be|youtube-nocookie.com))(/(?:[\w-]+\?v=|embed/|v/)?)([\w-]+)([\S^\?]+)?$")
(defn get-matched-video
  "Returns the regex match data for the first recognized video provider
  (YouTube, Loom, Vimeo or Bilibili) that `url` matches, or nil."
  [url]
  (some #(re-find % url)
        [youtube-regex loom-regex vimeo-regex bilibili-regex]))
(defn build-data-value
  "Serializes `col` into a bracketed, comma-separated string of
  double-quoted items, e.g. [\"a\", \"b\"]."
  [col]
  (gstring/format "[%s]"
                  (->> col
                       (map #(str "\"" % "\""))
                       (string/join ", "))))
(defn media-link?
  "Returns the regex match (truthy) when `s` ends with one of the file
  extensions in `media-formats` (case-insensitive), optionally followed by
  a ?query and/or #fragment; nil otherwise."
  [media-formats s]
  (some (fn [fmt] (util/safe-re-find (re-pattern (str "(?i)\\." fmt "(?:\\?([^#]*))?(?:#(.*))?$")) s)) media-formats))
(defn add-timestamp
  "Inserts or replaces a timestamp line (\"KEY: value\", key upper-cased)
  in `content`. Any existing line starting with `key` (case-insensitive)
  is replaced in place; otherwise the new line is inserted right after the
  first line (the title). Every line in the result is trimmed."
  [content key value]
  (let [new-line (str (string/upper-case key) ": " value)
        lines (string/split-lines content)
        ;; Replace any line that already starts with `key` (case-insensitive)
        new-lines (map (fn [line]
                         (string/trim
                          (if (string/starts-with? (string/lower-case line) key)
                            new-line
                            line)))
                       lines)
        ;; If nothing changed (beyond trimming), no line matched `key`:
        ;; insert `new-line` after the title line instead
        new-lines (if (not= (map string/trim lines) new-lines)
                    new-lines
                    (cons (first new-lines) ;; title
                          (cons
                           new-line
                           (rest new-lines))))]
    (string/join "\n" new-lines)))
(defn remove-timestamp
  "Drops every line of `content` whose lower-cased text starts with `key`
  (e.g. \"scheduled\"), rejoining the remaining lines with newlines."
  [content key]
  (->> (string/split-lines content)
       (remove #(string/starts-with? (string/lower-case %) key))
       (string/join "\n")))
(defn get-current-line-by-pos
  "Returns the line of `s` containing character offset `pos`, or nil when
  `pos` lies beyond the end of `s`."
  [s pos]
  (let [lines (string/split-lines s)
        ;; Walk the lines with a running character count; `reduced`
        ;; short-circuits with the line once the count reaches `pos`.
        result (reduce (fn [acc line]
                         (let [new-pos (+ acc (count line))]
                           (if (>= new-pos pos)
                             (reduced line)
                             ;; inc accounts for the newline separator
                             (inc new-pos)))) 0 lines)]
    ;; When no line was reached, the reduce yields the final count (a
    ;; number), so only a string result is a real hit
    (when (string? result)
      result)))
(defn surround-by?
  "`pos` must be surrounded by `before` and `end` in string `value`, e.g. ((|)).
  `before` may be the keyword :start to anchor at the beginning of `value`,
  and `end` may be the keyword :end to anchor at its end."
  [value pos before end]
  (let [start-pos (if (= :start before) 0 (- pos (count before)))
        end-pos (if (= :end end) (count value) (+ pos (count end)))]
    (when (>= (count value) end-pos)
      ;; Compare the slice around `pos` with the expected surrounding text;
      ;; keyword anchors contribute no characters of their own
      (= (cond
           (and (= :end end) (= :start before))
           ""

           (= :end end)
           before

           (= :start before)
           end

           :else
           (str before end))
         (subs value start-pos end-pos)))))
(defn get-string-all-indexes
  "Get all indexes of `value` in the string `s`, scanning left to right
  with non-overlapping matches. Returns [] when `value` is empty (the
  previous implementation looped forever in that case, since `index-of`
  matches the empty string at every position and the scan never advanced)."
  [s value]
  (if (empty? value)
    []
    (loop [acc []
           i 0]
      (if-let [i (string/index-of s value i)]
        (recur (conj acc i) (+ i (count value)))
        acc))))
(defn wrapped-by?
  "`pos` must be wrapped by `before` and `end` in string `value`, e.g. ((a|b)).
  Collects the indexes of every `before` and `end` occurrence, inserts `pos`
  among them, and checks that `pos` falls between a `before` and a
  following `end`."
  [value pos before end]
  (let [before-matches (->> (get-string-all-indexes value before)
                            (map (fn [i] [i :before])))
        end-matches (->> (get-string-all-indexes value end)
                         (map (fn [i] [i :end])))
        ;; All delimiter markers plus the cursor position, ordered by index
        indexes (sort-by first (concat before-matches end-matches [[pos :between]]))
        ks (map second indexes)
        q [:before :between :end]]
    (true?
     ;; Slide a window of the last two keywords over the ordered markers;
     ;; succeed as soon as the sequence :before :between :end appears
     (reduce (fn [acc k]
               (if (= q (conj acc k))
                 (reduced true)
                 (vec (take-last 2 (conj acc k)))))
             []
             ks))))
(defn get-graph-name-from-path
  "Derives a graph name from file `path`: the last two path segments joined
  by \"/\", or just the last segment when the second-to-last is \"0\".
  The result is URI-decoded. Returns nil for non-string input."
  [path]
  (when (string? path)
    (let [parts (->> (string/split path #"/")
                     (take-last 2))]
      (-> (if (not= (first parts) "0")
            (string/join "/" parts)
            (last parts))
          js/decodeURI))))
(defn extract-all-block-refs
  "Returns the uuid strings of every ((block-ref)) occurring in `content`."
  [content]
  (->> content
       (re-seq #"\(\(([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})\)\)")
       (map second)))

View File

@@ -1,24 +0,0 @@
(ns ^:nbb-compatible logseq.graph-parser.config
  "Config that is shared between graph-parser and rest of app"
  (:require [logseq.graph-parser.util :as gp-util]
            [clojure.string :as string]))

;; Directory name (relative to a graph) where assets are stored
(defonce local-assets-dir "assets")

(defn local-asset?
  "True-ish when `s` is a path into the local assets directory, optionally
  preceded by \".\"/\"/\" characters (e.g. \"../assets/...\")."
  [s]
  (gp-util/safe-re-find (re-pattern (str "^[./]*" local-assets-dir)) s))

;; Directory name (relative to a graph) where drawings are stored
(defonce default-draw-directory "draws")

(defn draw?
  "True when `path` starts with the default draw directory."
  [path]
  (string/starts-with? path default-draw-directory))

;; TODO: rename
(defonce mldoc-support-formats
  #{:org :markdown :md})

(defn mldoc-support?
  "True when `format` (keyword or string) is a format mldoc can parse."
  [format]
  (contains? mldoc-support-formats (keyword format)))

View File

@@ -1,12 +0,0 @@
(ns frontend.db.config
  (:require [frontend.db.conn :as conn]
            [frontend.state :as state]
            [frontend.db.persist :as db-persist]))

;; Tears down every db connection
(defn destroy-db! [] (conn/destroy-all!))

(defn clear-current-repo
  "Deletes the current repo's persisted graph, destroys all db connections,
  and restarts a fresh connection for the repo."
  []
  (let [current-repo (state/get-current-repo)]
    (db-persist/delete-graph! current-repo)
    (destroy-db!)
    (conn/start! nil current-repo)))

View File

@@ -2,7 +2,7 @@
(:require [cljs.test :refer [async deftest use-fixtures are is]]
[clojure.edn :as edn]
[frontend.handler.export :as export]
[frontend.db.config :as config]
[frontend.test.helper :as test-helper]
[frontend.handler.repo :as repo-handler]
[frontend.state :as state]
[promesa.core :as p]))
@@ -33,10 +33,10 @@
(use-fixtures :once
{:before (fn []
(async done
(config/clear-current-repo)
(test-helper/clear-current-repo)
(p/let [_ (import-test-data!)]
(done))))
:after config/destroy-db!})
:after test-helper/destroy-test-db!})
(deftest export-blocks-as-markdown
(are [expect block-uuid-s]

View File

@@ -1,135 +1,20 @@
(ns frontend.handler.repo-test
(:require [cljs.test :refer [deftest use-fixtures is testing]]
(:require [cljs.test :refer [deftest use-fixtures]]
[frontend.handler.repo :as repo-handler]
[frontend.test.helper :as test-helper]
[frontend.test.docs-graph-helper :as docs-graph-helper]
[datascript.core :as d]
[logseq.graph-parser.cli :as gp-cli]
[logseq.graph-parser.test.docs-graph-helper :as docs-graph-helper]
[frontend.db.conn :as conn]))
(use-fixtures :each {:before test-helper/start-test-db!
:after test-helper/destroy-test-db!})
(defn- get-top-block-properties
[db]
(->> (d/q '[:find (pull ?b [*])
:where
[?b :block/properties]
[(missing? $ ?b :block/name)]]
db)
(map first)
(map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
(apply merge-with +)
(filter #(>= (val %) 5))
(into {})))
(defn- get-all-page-properties
[db]
(->> (d/q '[:find (pull ?b [*])
:where
[?b :block/properties]
[?b :block/name]]
db)
(map first)
(map (fn [m] (zipmap (keys (:block/properties m)) (repeat 1))))
(apply merge-with +)
(into {})))
;; Integration test that test parsing a large graph like docs
(deftest ^:integration parse-and-load-files-to-db
(let [graph-dir "src/test/docs"
_ (docs-graph-helper/clone-docs-repo-if-not-exists graph-dir)
files (docs-graph-helper/build-graph-files graph-dir)
files (gp-cli/build-graph-files graph-dir)
_ (repo-handler/parse-files-and-load-to-db! test-helper/test-db files {:re-render? false})
db (conn/get-db test-helper/test-db)]
;; Counts assertions help check for no major regressions. These counts should
;; only increase over time as the docs graph rarely has deletions
(testing "Counts"
(is (= 206 (count files)) "Correct file count")
(is (= 40888 (count (d/datoms db :eavt))) "Correct datoms count")
(is (= 3597
(ffirst
(d/q '[:find (count ?b)
:where [?b :block/path-refs ?bp] [?bp :block/name]] db)))
"Correct referenced blocks count")
(is (= 21
(ffirst
(d/q '[:find (count ?b)
:where [?b :block/content ?content]
[(clojure.string/includes? ?content "+BEGIN_QUERY")]]
db)))
"Advanced query count"))
(testing "Query based stats"
(is (= (set (map :file/path files))
(->> (d/q '[:find (pull ?b [* {:block/file [:file/path]}])
:where [?b :block/name] [?b :block/file]]
db)
(map (comp #(get-in % [:block/file :file/path]) first))
set))
"Journal and pages files on disk should equal ones in db")
(is (= (count (filter #(re-find #"journals/" (:file/path %))
files))
(->> (d/q '[:find (count ?b)
:where
[?b :block/journal? true]
[?b :block/name]
[?b :block/file]]
db)
ffirst))
"Journal page count on disk equals count in db")
(is (= {"CANCELED" 2 "DONE" 6 "LATER" 4 "NOW" 5}
(->> (d/q '[:find (pull ?b [*]) :where [?b :block/marker] ]
db)
(map first)
(group-by :block/marker)
(map (fn [[k v]] [k (count v)]))
(into {})))
"Task marker counts")
(is (= {:markdown 3140 :org 460}
(->> (d/q '[:find (pull ?b [*]) :where [?b :block/format]] db)
(map first)
(group-by :block/format)
(map (fn [[k v]] [k (count v)]))
(into {})))
"Block format counts")
(is (= {:title 98 :id 98
:updated-at 47 :created-at 47
:card-last-score 6 :card-repeats 6 :card-next-schedule 6
:card-last-interval 6 :card-ease-factor 6 :card-last-reviewed 6
:alias 6}
(get-top-block-properties db))
"Counts for top block properties")
(is (= {:title 98
:alias 6
:tags 2 :permalink 2
:name 1 :type 1 :related 1 :sample 1 :click 1 :id 1 :example 1}
(get-all-page-properties db))
"Counts for all page properties")
(is (= {:block/scheduled 2
:block/priority 4
:block/deadline 1
:block/collapsed? 22
:block/heading-level 57
:block/repeated? 1}
(->> [:block/scheduled :block/priority :block/deadline :block/collapsed?
:block/heading-level :block/repeated?]
(map (fn [attr]
[attr
(ffirst (d/q [:find (list 'count '?b) :where ['?b attr]]
db))]))
(into {})))
"Counts for blocks with common block attributes")
(is (= #{"term" "setting" "book" "Templates" "Query" "Query/table" "page"}
(->> (d/q '[:find (pull ?n [*]) :where [?b :block/namespace ?n]] db)
(map (comp :block/original-name first))
set))
"Has correct namespaces"))))
(docs-graph-helper/docs-graph-assertions db (map :file/path files))))

View File

@@ -1,34 +0,0 @@
(ns ^:nbb-compatible frontend.test.docs-graph-helper
  "Helper fns for running tests against docs graph"
  (:require ["fs" :as fs]
            ["child_process" :as child-process]
            [clojure.string :as string]))

(defn slurp
  "Like clojure.core/slurp"
  [file]
  (str (fs/readFileSync file)))

(defn- sh
  "Run shell cmd synchronously and print to inherited streams by default. Aims
  to be similar to babashka.tasks/shell"
  [cmd opts]
  (child-process/spawnSync (first cmd)
                           (clj->js (rest cmd))
                           (clj->js (merge {:stdio "inherit"} opts))))

(defn build-graph-files
  "Lists the git-tracked files under `dir` whose paths start with pages/ or
  journals/ and returns a vector of {:file/path ... :file/content ...} maps."
  [dir]
  (let [files (->> (str (.-stdout (sh ["git" "ls-files"]
                                      {:cwd dir :stdio nil})))
                   string/split-lines
                   (filter #(re-find #"^(pages|journals)" %))
                   (map #(str dir "/" %)))]
    (mapv #(hash-map :file/path % :file/content (slurp %)) files)))

(defn clone-docs-repo-if-not-exists
  "Shallow-clones the logseq/docs repo at tag v0.6.7 into `dir` unless `dir`
  already exists."
  [dir]
  (when-not (.existsSync fs dir)
    (sh ["git" "clone" "--depth" "1" "-b" "v0.6.7" "-c" "advice.detachedHead=false"
         "https://github.com/logseq/docs" dir] {})))

View File

@@ -1,7 +1,7 @@
(ns frontend.test.fixtures
(:require [datascript.core :as d]
[frontend.config :as config]
[frontend.db-schema :as db-schema]
[logseq.graph-parser.db.schema :as db-schema]
[frontend.db.conn :as conn]
[frontend.db.react :as react]
[frontend.state :as state]

View File

@@ -1,17 +1,25 @@
(ns frontend.test.helper
"Common helper fns for tests"
(:require [frontend.handler.repo :as repo-handler]
[frontend.db.persist :as db-persist]
[frontend.state :as state]
[frontend.db.conn :as conn]))
(defonce test-db "test-db")
(defn start-test-db!
[]
(conn/start! nil test-db))
(conn/start! test-db))
(defn destroy-test-db!
  "Tears down every db connection created during tests."
  []
  (conn/destroy-all!))
(defn clear-current-repo
  "Deletes the current repo's persisted graph, destroys the test db, and
  restarts a fresh connection for the repo."
  []
  (let [current-repo (state/get-current-repo)]
    (db-persist/delete-graph! current-repo)
    (destroy-test-db!)
    (conn/start! current-repo)))
(defn load-test-files
  "Parses `files` ({:file/path ... :file/content ...} maps) and loads them
  into the test db, passing {:re-render? false}."
  [files]
  (repo-handler/parse-files-and-load-to-db! test-db files {:re-render? false}))

View File

@@ -0,0 +1,33 @@
(ns frontend.util.text-test
(:require [cljs.test :refer [are deftest]]
[frontend.util.text :as text-util]))
;; NOTE: the stray `[]` after the deftest name was removed -- `deftest`
;; takes a name and a body, and the literal vector was a no-op form.
(deftest test-add-timestamp
  ;; Each `are` pair is (actual expected)
  (are [x y] (= x y)
    (text-util/add-timestamp "LATER hello world\nhello"
                             "scheduled"
                             "<2021-08-25 Wed>")
    "LATER hello world\nSCHEDULED: <2021-08-25 Wed>\nhello"

    (text-util/add-timestamp "LATER hello world "
                             "scheduled"
                             "<2021-08-25 Wed>")
    "LATER hello world\nSCHEDULED: <2021-08-25 Wed>"

    (text-util/add-timestamp "LATER hello world\nfoo:: bar\ntest"
                             "scheduled"
                             "<2021-08-25 Wed>")
    "LATER hello world\nSCHEDULED: <2021-08-25 Wed>\nfoo:: bar\ntest"))
;; NOTE: the stray `[]` after the deftest name was removed -- `deftest`
;; takes a name and a body, and the literal vector was a no-op form.
(deftest get-string-all-indexes
  ;; Each `are` pair is (actual expected)
  (are [x y] (= x y)
    (text-util/get-string-all-indexes "[[hello]] [[world]]" "[[")
    [0 10]

    (text-util/get-string-all-indexes "abc abc ab" "ab")
    [0 4 8]

    (text-util/get-string-all-indexes "a.c a.c ab" "a.")
    [0 4]))