mirror of
https://github.com/logseq/logseq.git
synced 2026-04-24 22:25:01 +00:00
add publish worker
This commit is contained in:
2
.gitignore
vendored
2
.gitignore
vendored
@@ -67,6 +67,8 @@ packages/ui/.storybook/cljs
|
||||
deps/shui/.lsp
|
||||
deps/shui/.lsp-cache
|
||||
deps/shui/.clj-kondo
|
||||
deps/publish/worker/.wrangler
|
||||
|
||||
tx-log*
|
||||
clj-e2e/.wally
|
||||
clj-e2e/resources
|
||||
|
||||
6
deps/publish/README.md
vendored
6
deps/publish/README.md
vendored
@@ -3,7 +3,11 @@
|
||||
Shared library for page publishing (snapshot payloads, SSR helpers, shared schemas, and storage contracts).
|
||||
|
||||
The Cloudflare Durable Object implementation is expected to use SQLite with the
|
||||
Logseq datascript fork layered on top.
|
||||
Logseq datascript fork layered on top. Page publish payloads are expected to
|
||||
send datoms (transit) so the DO can reconstruct/query datascript state.
|
||||
|
||||
See `deps/publish/worker` for a Cloudflare Worker skeleton that stores transit
|
||||
blobs in R2 and metadata in a SQLite-backed Durable Object.
|
||||
|
||||
## API
|
||||
|
||||
|
||||
38
deps/publish/worker/README.md
vendored
Normal file
38
deps/publish/worker/README.md
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
## Cloudflare Publish Worker (Skeleton)
|
||||
|
||||
This worker accepts publish payloads and stores transit blobs in R2 while keeping
|
||||
metadata in a Durable Object backed by SQLite.
|
||||
|
||||
### Bindings
|
||||
|
||||
- `PUBLISH_META_DO`: Durable Object namespace
|
||||
- `PUBLISH_R2`: R2 bucket
|
||||
- `R2_ACCOUNT_ID`: Cloudflare account id for signing
|
||||
- `R2_BUCKET`: R2 bucket name for signing
|
||||
- `R2_ACCESS_KEY_ID`: R2 access key for signing
|
||||
- `R2_SECRET_ACCESS_KEY`: R2 secret key for signing
|
||||
- `COGNITO_JWKS_URL`: JWKS URL for Cognito user pool
|
||||
- `COGNITO_ISSUER`: Cognito issuer URL
|
||||
- `COGNITO_CLIENT_ID`: Cognito client ID
|
||||
- `DEV_SKIP_AUTH`: set to `true` to bypass JWT verification in local dev
|
||||
|
||||
### Routes
|
||||
|
||||
- `POST /pages`
|
||||
- Requires `Authorization: Bearer <JWT>`
|
||||
- Requires `x-publish-meta` header (JSON)
|
||||
- Body is transit payload (stored in R2 as-is)
|
||||
- `GET /pages/:page-uuid`
|
||||
- Returns metadata for the page
|
||||
- `GET /pages/:page-uuid/transit`
|
||||
- Returns JSON with a signed R2 URL and `etag`
|
||||
- `GET /pages`
|
||||
- Lists metadata entries (from the index DO)
|
||||
|
||||
### Notes
|
||||
|
||||
- This is a starter implementation. Integrate with your deployment tooling
|
||||
(wrangler, etc.) as needed.
|
||||
- For local testing, run `wrangler dev` and use `deps/publish/worker/scripts/dev_test.sh`.
|
||||
- If you switch schema versions, clear local DO state with
|
||||
`deps/publish/worker/scripts/clear_dev_state.sh`.
|
||||
12
deps/publish/worker/scripts/clear_dev_state.sh
vendored
Executable file
12
deps/publish/worker/scripts/clear_dev_state.sh
vendored
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
# Print instructions for wiping local (miniflare) Durable Object state,
# e.g. after a schema-version change. This script does not delete
# anything itself; it only echoes the command to run.
set -euo pipefail

# Example graph UUID used in the printed path; override via env.
GRAPH_UUID=${GRAPH_UUID:-"00000000-0000-0000-0000-000000000000"}

cat <<MSG
To clear local Durable Object state, remove the miniflare state directory:
rm -rf .wrangler/state/v3/durable-objects/${GRAPH_UUID}

If your dev environment uses a different state path, locate it under:
.wrangler/state/v3/
MSG
|
||||
32
deps/publish/worker/scripts/dev_test.sh
vendored
Executable file
32
deps/publish/worker/scripts/dev_test.sh
vendored
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env bash
# Smoke-test the publish worker against a local `wrangler dev` instance:
# publish a dummy page, then exercise each GET endpoint in turn.
set -euo pipefail

# Override via env to target a different worker / graph / page.
BASE_URL=${BASE_URL:-"http://127.0.0.1:8787"}
GRAPH_UUID=${GRAPH_UUID:-"00000000-0000-0000-0000-000000000000"}
PAGE_UUID=${PAGE_UUID:-"00000000-0000-0000-0000-000000000001"}

# Minimal x-publish-meta header the worker requires (see src/index.js:
# publish/content-hash, publish/graph, and page-uuid are mandatory).
META=$(cat <<JSON
{"page-uuid":"${PAGE_UUID}","block-count":1,"schema-version":"0","publish/format":"transit","publish/compression":"none","publish/content-hash":"dev","publish/content-length":1,"publish/graph":"${GRAPH_UUID}","publish/created-at":0}
JSON
)

# Dummy transit body; the worker stores it in R2 as-is.
PAYLOAD="{}"

# Publish. No Authorization header is sent, so this relies on the
# worker running with DEV_SKIP_AUTH=true.
curl -sS -X POST "${BASE_URL}/pages" \
-H "content-type: application/transit+json" \
-H "x-publish-meta: ${META}" \
--data-binary "${PAYLOAD}"

echo

# Fetch page metadata.
curl -sS "${BASE_URL}/pages/${PAGE_UUID}"

echo

# Fetch the signed transit URL for the page.
curl -sS "${BASE_URL}/pages/${PAGE_UUID}/transit"

echo

# List all published pages (index DO).
curl -sS "${BASE_URL}/pages"

echo
|
||||
501
deps/publish/worker/src/index.js
vendored
Normal file
501
deps/publish/worker/src/index.js
vendored
Normal file
@@ -0,0 +1,501 @@
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
// Serialize `data` as JSON and wrap it in a Response with the given
// status (default 200) and a JSON content-type header.
function jsonResponse(data, status = 200) {
  const body = JSON.stringify(data);
  const headers = { "content-type": "application/json" };
  return new Response(body, { status, headers });
}
|
||||
|
||||
// 401 helper: JSON body `{"error":"unauthorized"}`.
function unauthorized() {
  return new Response(JSON.stringify({ error: "unauthorized" }), {
    status: 401,
    headers: { "content-type": "application/json" },
  });
}
|
||||
|
||||
// 400 helper: JSON body `{"error": <message>}`.
function badRequest(message) {
  return new Response(JSON.stringify({ error: message }), {
    status: 400,
    headers: { "content-type": "application/json" },
  });
}
|
||||
|
||||
// Decode a base64url string (RFC 4648 §5, unpadded) into raw bytes.
function base64UrlToUint8Array(input) {
  // Restore padding, then map the URL-safe alphabet back to standard base64.
  const remainder = input.length % 4;
  let base64 = input.replace(/-/g, "+").replace(/_/g, "/");
  if (remainder) {
    base64 += "=".repeat(4 - remainder);
  }
  const raw = atob(base64);
  // atob yields one char per byte (all < U+0100), so from() is safe here.
  return Uint8Array.from(raw, (ch) => ch.charCodeAt(0));
}
|
||||
|
||||
// Normalize the many shapes a SQL exec result can take (cursor with
// toArray, generic iterable, {results}/{rows} wrappers, plain arrays)
// into a plain array of row objects.
//
// Bug fix: arrays ARE iterable, so in the original the
// `Array.isArray(result)` branch — which unwraps `[{results: …}]` /
// `[{rows: …}]` — was unreachable (the Symbol.iterator branch always
// won). Arrays are now checked first so that unwrapping actually runs.
function getSqlRows(result) {
  if (!result) return [];
  if (typeof result.toArray === "function") return result.toArray();
  if (Array.isArray(result)) {
    if (result.length === 0) return [];
    const first = result[0];
    if (first && Array.isArray(first.results)) return first.results;
    if (first && Array.isArray(first.rows)) return first.rows;
    return result;
  }
  // Non-array iterables (e.g. cursor objects): materialize a copy.
  if (typeof result[Symbol.iterator] === "function") {
    return Array.from(result);
  }
  if (Array.isArray(result.results)) return result.results;
  if (Array.isArray(result.rows)) return result.rows;
  return [];
}
|
||||
|
||||
// Render an ArrayBuffer (or typed-array view) as a lowercase hex string.
function toHex(buffer) {
  const bytes = new Uint8Array(buffer);
  let hex = "";
  for (const byte of bytes) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
|
||||
|
||||
async function sha256Hex(message) {
|
||||
const data = new TextEncoder().encode(message);
|
||||
const digest = await crypto.subtle.digest("SHA-256", data);
|
||||
return toHex(digest);
|
||||
}
|
||||
|
||||
async function hmacSha256(key, message) {
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
"raw",
|
||||
key,
|
||||
{ name: "HMAC", hash: "SHA-256" },
|
||||
false,
|
||||
["sign"]
|
||||
);
|
||||
return crypto.subtle.sign("HMAC", cryptoKey, message);
|
||||
}
|
||||
|
||||
// Strict RFC 3986 percent-encoding: like encodeURIComponent, but also
// escapes ! ' ( ) * (required by AWS SigV4 canonicalization).
function encodeRfc3986(value) {
  return encodeURIComponent(value).replace(/[!'()*]/g, (ch) => {
    const hex = ch.charCodeAt(0).toString(16).toUpperCase();
    return `%${hex}`;
  });
}
|
||||
|
||||
// Percent-encode each path segment (strict RFC 3986, also escaping
// ! ' ( ) *) while preserving the "/" separators.
function encodePath(path) {
  const segments = path.split("/");
  const encoded = segments.map((segment) =>
    encodeURIComponent(segment).replace(
      /[!'()*]/g,
      (ch) => `%${ch.charCodeAt(0).toString(16).toUpperCase()}`
    )
  );
  return encoded.join("/");
}
|
||||
|
||||
// Derive the AWS SigV4 signing key: an HMAC chain over the date stamp,
// region, service, and the literal "aws4_request", seeded with
// "AWS4" + secret.
async function getSignatureKey(secret, dateStamp, region, service) {
  const encode = (s) => new TextEncoder().encode(s);
  let key = await hmacSha256(encode(`AWS4${secret}`), encode(dateStamp));
  key = await hmacSha256(key, encode(region));
  key = await hmacSha256(key, encode(service));
  return hmacSha256(key, encode("aws4_request"));
}
|
||||
|
||||
// Build a presigned GET URL for an R2 object using AWS Signature
// Version 4 (query-parameter / "presigned URL" flavor, as for S3).
// Reads env bindings: R2_ACCOUNT_ID, R2_BUCKET, R2_ACCESS_KEY_ID,
// R2_SECRET_ACCESS_KEY. Default expiry is 300 seconds.
async function presignR2Url(r2Key, env, expiresSeconds = 300) {
  const region = "auto"; // R2's S3 API always uses the "auto" region
  const service = "s3";
  const host = `${env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`;
  const bucket = env.R2_BUCKET;
  const method = "GET";
  const now = new Date();
  // SigV4 timestamp: YYYYMMDDTHHMMSSZ (strip '-', ':' and milliseconds).
  const amzDate = now
    .toISOString()
    .replace(/[:-]|\.\d{3}/g, "");
  const dateStamp = amzDate.slice(0, 8);
  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;

  const params = [
    ["X-Amz-Algorithm", "AWS4-HMAC-SHA256"],
    ["X-Amz-Credential", `${env.R2_ACCESS_KEY_ID}/${credentialScope}`],
    ["X-Amz-Date", amzDate],
    ["X-Amz-Expires", String(expiresSeconds)],
    ["X-Amz-SignedHeaders", "host"],
  ];
  // SigV4 requires query parameters sorted by name.
  params.sort((a, b) => (a[0] < b[0] ? -1 : 1));
  const canonicalQueryString = params
    .map(([k, v]) => `${encodeRfc3986(k)}=${encodeRfc3986(v)}`)
    .join("&");

  // Path-style addressing: /<bucket>/<key>.
  const canonicalUri = `/${bucket}/${encodePath(r2Key)}`;
  const canonicalHeaders = `host:${host}\n`;
  const signedHeaders = "host";
  // Presigned URLs do not sign the body.
  const payloadHash = "UNSIGNED-PAYLOAD";
  const canonicalRequest = [
    method,
    canonicalUri,
    canonicalQueryString,
    canonicalHeaders,
    signedHeaders,
    payloadHash,
  ].join("\n");

  const stringToSign = [
    "AWS4-HMAC-SHA256",
    amzDate,
    credentialScope,
    await sha256Hex(canonicalRequest),
  ].join("\n");

  const signingKey = await getSignatureKey(
    env.R2_SECRET_ACCESS_KEY,
    dateStamp,
    region,
    service
  );
  const signature = toHex(await hmacSha256(signingKey, new TextEncoder().encode(stringToSign)));
  // Signature is appended last; it is not part of the canonical query.
  const signedQuery = `${canonicalQueryString}&X-Amz-Signature=${signature}`;

  return `https://${host}${canonicalUri}?${signedQuery}`;
}
|
||||
|
||||
// Decode one base64url-encoded JWT segment and parse it as JSON.
// Throws on malformed base64 or JSON.
function decodeJwtPart(part) {
  const remainder = part.length % 4;
  const padded = remainder ? part + "=".repeat(4 - remainder) : part;
  const raw = atob(padded.replace(/-/g, "+").replace(/_/g, "/"));
  const bytes = new Uint8Array(raw.length);
  for (let i = 0; i < raw.length; i += 1) {
    bytes[i] = raw.charCodeAt(i);
  }
  return JSON.parse(new TextDecoder().decode(bytes));
}
|
||||
|
||||
async function importRsaKey(jwk) {
|
||||
return crypto.subtle.importKey(
|
||||
"jwk",
|
||||
jwk,
|
||||
{
|
||||
name: "RSASSA-PKCS1-v1_5",
|
||||
hash: "SHA-256",
|
||||
},
|
||||
false,
|
||||
["verify"]
|
||||
);
|
||||
}
|
||||
|
||||
// Verify a Cognito-issued RS256 JWT against the pool's JWKS.
// Returns the payload claims on success, or null for any malformed,
// expired, or unverifiable token.
//
// Bug fix: decodeJwtPart throws on malformed base64/JSON, so a garbage
// Authorization header previously escaped as an unhandled exception
// (HTTP 500) instead of a clean 401; same for a bad JWK or signature
// bytes in the verify step. Both are now caught and treated as failure.
//
// NOTE(review): checks `aud`, which Cognito sets on ID tokens; access
// tokens carry `client_id` instead — confirm which token type clients send.
async function verifyJwt(token, env) {
  const parts = token.split(".");
  if (parts.length !== 3) {
    return null;
  }
  const [headerPart, payloadPart, signaturePart] = parts;

  let header;
  let payload;
  try {
    header = decodeJwtPart(headerPart);
    payload = decodeJwtPart(payloadPart);
  } catch (_err) {
    // Malformed token must read as "unauthorized", not a server error.
    return null;
  }

  if (payload.iss !== env.COGNITO_ISSUER) {
    return null;
  }
  if (payload.aud !== env.COGNITO_CLIENT_ID) {
    return null;
  }
  const now = Math.floor(Date.now() / 1000);
  if (payload.exp && payload.exp < now) {
    return null;
  }

  // Fetch the pool's JWKS and pick the key named in the token header.
  const jwksResp = await fetch(env.COGNITO_JWKS_URL);
  if (!jwksResp.ok) {
    return null;
  }
  const jwks = await jwksResp.json();
  const key = (jwks.keys || []).find((k) => k.kid === header.kid);
  if (!key) {
    return null;
  }

  try {
    const cryptoKey = await importRsaKey(key);
    const data = new TextEncoder().encode(`${headerPart}.${payloadPart}`);
    const signature = base64UrlToUint8Array(signaturePart);
    const ok = await crypto.subtle.verify("RSASSA-PKCS1-v1_5", cryptoKey, signature, data);
    return ok ? payload : null;
  } catch (_err) {
    // Bad JWK material or signature bytes: treat as verification failure.
    return null;
  }
}
|
||||
|
||||
// POST /pages — authenticate the caller, store the transit body in R2
// (content-addressed; skipped when the object already exists), then
// upsert metadata into the per-page DO and mirror it into the shared
// "index" DO used by GET /pages.
//
// Fixes over the original:
// - Date.now() was evaluated three separate times, so the page DO, the
//   index DO, and the response could carry different updated_at values;
//   a single timestamp and a single serialized record are now shared.
// - The auth flow's redundant `!claims && !devSkipAuth` double-check is
//   replaced by a straight-line branch.
async function handlePostPages(request, env) {
  const devSkipAuth = env.DEV_SKIP_AUTH === "true";
  const authHeader = request.headers.get("authorization") || "";
  const token = authHeader.startsWith("Bearer ") ? authHeader.slice(7) : null;

  let claims;
  if (devSkipAuth) {
    // Local dev: skip JWT verification entirely.
    claims = { sub: "dev" };
  } else {
    if (!token) {
      return unauthorized();
    }
    claims = await verifyJwt(token, env);
    if (!claims) {
      return unauthorized();
    }
  }

  const metaHeader = request.headers.get("x-publish-meta");
  if (!metaHeader) {
    return badRequest("missing x-publish-meta header");
  }

  let meta;
  try {
    meta = JSON.parse(metaHeader);
  } catch (_err) {
    return badRequest("invalid x-publish-meta header");
  }

  if (!meta["publish/content-hash"] || !meta["publish/graph"] || !meta["page-uuid"]) {
    return badRequest("missing publish metadata");
  }

  const body = await request.arrayBuffer();
  // Content-addressed key: identical payloads share one R2 object.
  const r2Key = `publish/${meta["publish/graph"]}/${meta["publish/content-hash"]}.transit`;

  const existing = await env.PUBLISH_R2.head(r2Key);
  if (!existing) {
    await env.PUBLISH_R2.put(r2Key, body, {
      httpMetadata: {
        contentType: "application/transit+json",
      },
    });
  }

  // One timestamp and one serialized record shared by the page DO, the
  // index DO, and the response, so the three cannot disagree.
  const updatedAt = Date.now();
  const record = JSON.stringify({
    ...meta,
    r2_key: r2Key,
    owner_sub: claims.sub,
    updated_at: updatedAt,
  });

  const doId = env.PUBLISH_META_DO.idFromName(meta["page-uuid"]);
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch("https://publish/pages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: record,
  });

  if (!metaResponse.ok) {
    return jsonResponse({ error: "metadata store failed" }, 500);
  }

  // Mirror the record into the shared index DO (best-effort; the page
  // DO write above is the source of truth for GET /pages/:uuid).
  const indexId = env.PUBLISH_META_DO.idFromName("index");
  const indexStub = env.PUBLISH_META_DO.get(indexId);
  await indexStub.fetch("https://publish/pages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: record,
  });

  return jsonResponse({
    page_uuid: meta["page-uuid"],
    r2_key: r2Key,
    updated_at: updatedAt,
  });
}
|
||||
|
||||
// GET /pages/:page-uuid — fetch metadata from the page's Durable
// Object, honoring If-None-Match against the publish content hash.
async function handleGetPage(request, env) {
  const pageUuid = new URL(request.url).pathname.split("/")[2];
  if (!pageUuid) {
    return badRequest("missing page uuid");
  }

  const stub = env.PUBLISH_META_DO.get(env.PUBLISH_META_DO.idFromName(pageUuid));
  const metaResponse = await stub.fetch(`https://publish/pages/${pageUuid}`);
  if (!metaResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }

  const meta = await metaResponse.json();
  const etag = meta["publish/content-hash"];
  const ifNoneMatch = request.headers.get("if-none-match");
  // Strip quotes from the client's ETag before comparing.
  if (etag && ifNoneMatch && ifNoneMatch.replace(/"/g, "") === etag) {
    return new Response(null, { status: 304, headers: { etag } });
  }
  return jsonResponse(meta, 200);
}
|
||||
|
||||
// GET /pages/:page-uuid/transit — resolve the page's metadata from its
// Durable Object, then return a short-lived presigned R2 URL for the
// transit blob. Honors If-None-Match against the content hash (304).
async function handleGetPageTransit(request, env) {
  const url = new URL(request.url);
  const pageUuid = url.pathname.split("/")[2];
  if (!pageUuid) {
    return badRequest("missing page uuid");
  }
  // Metadata lives in the per-page DO, keyed by page uuid.
  const doId = env.PUBLISH_META_DO.idFromName(pageUuid);
  const doStub = env.PUBLISH_META_DO.get(doId);
  const metaResponse = await doStub.fetch(`https://publish/pages/${pageUuid}`);
  if (!metaResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }
  const meta = await metaResponse.json();
  if (!meta.r2_key) {
    return jsonResponse({ error: "missing transit" }, 404);
  }

  // ETag is the publish content hash; strip quotes from the client's
  // If-None-Match value before comparing.
  const etag = meta["publish/content-hash"];
  const ifNoneMatch = request.headers.get("if-none-match");
  if (etag && ifNoneMatch && ifNoneMatch.replace(/\"/g, "") === etag) {
    return new Response(null, {
      status: 304,
      headers: {
        etag,
      },
    });
  }

  // expires_in mirrors presignR2Url's default expiry (300 seconds).
  const signedUrl = await presignR2Url(meta.r2_key, env);
  return jsonResponse(
    {
      url: signedUrl,
      expires_in: 300,
      etag,
    },
    200
  );
}
|
||||
|
||||
// GET /pages — return the listing kept in the shared "index" DO.
async function handleListPages(request, env) {
  const indexStub = env.PUBLISH_META_DO.get(env.PUBLISH_META_DO.idFromName("index"));
  const listResponse = await indexStub.fetch("https://publish/pages", {
    method: "GET",
  });
  if (!listResponse.ok) {
    return jsonResponse({ error: "not found" }, 404);
  }
  const listing = await listResponse.json();
  return jsonResponse(listing, 200);
}
|
||||
|
||||
// Worker entry point: dispatch by path and method.
export default {
  async fetch(request, env) {
    const { pathname } = new URL(request.url);
    const method = request.method;
    if (pathname === "/pages") {
      if (method === "POST") return handlePostPages(request, env);
      if (method === "GET") return handleListPages(request, env);
    } else if (pathname.startsWith("/pages/") && method === "GET") {
      // /pages/<uuid>[/transit] — segment 3 selects the transit route.
      const segments = pathname.split("/");
      return segments[3] === "transit"
        ? handleGetPageTransit(request, env)
        : handleGetPage(request, env);
    }
    return jsonResponse({ error: "not found" }, 404);
  },
};
|
||||
|
||||
// Durable Object storing publish metadata in SQLite. Two usage modes
// share this class: a per-page instance (DO name = page uuid) and a
// shared "index" instance (DO name = "index") that holds every page's
// row for listing.
export class PublishMetaDO extends DurableObject {
  constructor(state, env) {
    super(state, env);
    this.state = state;
    this.env = env;
    // SQLite handle exposed by the SQLite-backed DO storage.
    this.sql = state.storage.sql;
  }

  // Create the pages table if needed. A legacy table containing a
  // `page_id` column (pre-uuid schema) is dropped and recreated —
  // local dev data only; see scripts/clear_dev_state.sh.
  async initSchema() {
    const cols = getSqlRows(this.sql.exec("PRAGMA table_info(pages);"));
    const hasLegacyId = cols.some((col) => col.name === "page_id");
    if (hasLegacyId) {
      this.sql.exec("DROP TABLE IF EXISTS pages;");
    }
    this.sql.exec(`
      CREATE TABLE IF NOT EXISTS pages (
        page_uuid TEXT NOT NULL,
        graph TEXT NOT NULL,
        schema_version TEXT,
        block_count INTEGER,
        content_hash TEXT NOT NULL,
        content_length INTEGER,
        r2_key TEXT NOT NULL,
        owner_sub TEXT,
        created_at INTEGER,
        updated_at INTEGER,
        PRIMARY KEY (graph, page_uuid)
      );
    `);
  }

  // Internal router (called via stub.fetch from the worker):
  //   POST  — upsert one metadata row from a JSON body
  //   GET /pages/<uuid> — return that page's row
  //   GET /pages        — return all rows, newest first
  async fetch(request) {
    await this.initSchema();
    if (request.method === "POST") {
      const body = await request.json();
      // Upsert keyed on (graph, page_uuid). created_at is intentionally
      // NOT in the UPDATE set, so the first publish time is preserved;
      // updated_at is overwritten on every publish.
      this.sql.exec(
        `
        INSERT INTO pages (
          page_uuid,
          graph,
          schema_version,
          block_count,
          content_hash,
          content_length,
          r2_key,
          owner_sub,
          created_at,
          updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(graph, page_uuid) DO UPDATE SET
          page_uuid=excluded.page_uuid,
          schema_version=excluded.schema_version,
          block_count=excluded.block_count,
          content_hash=excluded.content_hash,
          content_length=excluded.content_length,
          r2_key=excluded.r2_key,
          owner_sub=excluded.owner_sub,
          updated_at=excluded.updated_at;
        `,
        body["page-uuid"],
        body["publish/graph"],
        body["schema-version"],
        body["block-count"],
        body["publish/content-hash"],
        body["publish/content-length"],
        body["r2_key"],
        body["owner_sub"],
        body["publish/created-at"],
        body["updated_at"]
      );

      return jsonResponse({ ok: true });
    }

    if (request.method === "GET") {
      const url = new URL(request.url);
      const parts = url.pathname.split("/");
      // "/pages" -> parts[2] is undefined (list); "/pages/<uuid>" ->
      // parts[2] is the uuid (single-row lookup).
      const pageUuid = parts[2];
      if (pageUuid) {
        const result = this.sql.exec(
          `
          SELECT page_uuid, graph, schema_version, block_count,
                 content_hash, content_length, r2_key, owner_sub, created_at, updated_at
          FROM pages WHERE page_uuid = ? LIMIT 1;
          `,
          pageUuid
        );
        const rows = getSqlRows(result);
        const row = rows[0];
        if (!row) {
          return jsonResponse({ error: "not found" }, 404);
        }
        // Echo the content fields under the namespaced keys the worker
        // handlers read (publish/content-hash drives ETag handling).
        return jsonResponse({
          ...row,
          "publish/content-hash": row.content_hash,
          "publish/content-length": row.content_length,
        });
      }

      const result = this.sql.exec(`
        SELECT page_uuid, graph, schema_version, block_count,
               content_hash, content_length, r2_key, owner_sub, created_at, updated_at
        FROM pages ORDER BY updated_at DESC;
      `);
      const rows = getSqlRows(result);
      return jsonResponse({
        pages: rows.map((row) => ({
          ...row,
          "publish/content-hash": row.content_hash,
          "publish/content-length": row.content_length,
        })),
      });
    }

    return jsonResponse({ error: "method not allowed" }, 405);
  }
}
|
||||
25
deps/publish/worker/wrangler.toml
vendored
Normal file
25
deps/publish/worker/wrangler.toml
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
name = "logseq-publish"
|
||||
main = "src/index.js"
|
||||
compatibility_date = "2024-11-01"
|
||||
|
||||
[[durable_objects.bindings]]
|
||||
name = "PUBLISH_META_DO"
|
||||
class_name = "PublishMetaDO"
|
||||
|
||||
[[migrations]]
|
||||
tag = "v2"
|
||||
new_sqlite_classes = ["PublishMetaDO"]
|
||||
|
||||
[[r2_buckets]]
|
||||
binding = "PUBLISH_R2"
|
||||
bucket_name = "logseq-publish-dev"
|
||||
|
||||
[vars]
|
||||
COGNITO_JWKS_URL = "https://cognito-idp.us-east-1.amazonaws.com/POOL_ID/.well-known/jwks.json"
|
||||
COGNITO_ISSUER = "https://cognito-idp.us-east-1.amazonaws.com/POOL_ID"
|
||||
COGNITO_CLIENT_ID = "CLIENT_ID"
|
||||
R2_ACCOUNT_ID = "YOUR_ACCOUNT_ID"
|
||||
R2_BUCKET = "logseq-publish-dev"
|
||||
R2_ACCESS_KEY_ID = "R2_ACCESS_KEY_ID"
|
||||
R2_SECRET_ACCESS_KEY = "R2_SECRET_ACCESS_KEY"
|
||||
DEV_SKIP_AUTH = "true"
|
||||
@@ -29,6 +29,8 @@
|
||||
(do (def LOGIN-URL
|
||||
"https://logseq-prod.auth.us-east-1.amazoncognito.com/login?client_id=3c7np6bjtb4r1k1bi9i049ops5&response_type=code&scope=email+openid+phone&redirect_uri=logseq%3A%2F%2Fauth-callback")
|
||||
(def API-DOMAIN "api.logseq.com")
|
||||
(def PUBLISH-API-DOMAIN "publish.logseq.com")
|
||||
(def PUBLISH-API-BASE (str "https://" PUBLISH-API-DOMAIN))
|
||||
(def COGNITO-IDP "https://cognito-idp.us-east-1.amazonaws.com/")
|
||||
(def COGNITO-CLIENT-ID "69cs1lgme7p8kbgld8n5kseii6")
|
||||
(def REGION "us-east-1")
|
||||
@@ -39,6 +41,8 @@
|
||||
(do (def LOGIN-URL
|
||||
"https://logseq-test2.auth.us-east-2.amazoncognito.com/login?client_id=3ji1a0059hspovjq5fhed3uil8&response_type=code&scope=email+openid+phone&redirect_uri=logseq%3A%2F%2Fauth-callback")
|
||||
(def API-DOMAIN "api-dev.logseq.com")
|
||||
(def PUBLISH-API-DOMAIN "publish-dev.logseq.com")
|
||||
(def PUBLISH-API-BASE (str "https://" PUBLISH-API-DOMAIN))
|
||||
(def COGNITO-IDP "https://cognito-idp.us-east-2.amazonaws.com/")
|
||||
(def COGNITO-CLIENT-ID "1qi1uijg8b6ra70nejvbptis0q")
|
||||
(def REGION "us-east-2")
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
(ns frontend.handler.publish
|
||||
"Prepare publish payloads for pages."
|
||||
(:require [datascript.core :as d]
|
||||
[frontend.config :as config]
|
||||
[frontend.db :as db]
|
||||
[frontend.handler.notification :as notification]
|
||||
[frontend.state :as state]
|
||||
[frontend.util :as util]
|
||||
[logseq.db :as ldb]
|
||||
[logseq.db.common.entity-util :as entity-util]
|
||||
[logseq.db.frontend.schema :as db-schema]))
|
||||
[logseq.db.frontend.schema :as db-schema]
|
||||
[promesa.core :as p]))
|
||||
|
||||
(defn- datom->vec
|
||||
[datom]
|
||||
@@ -36,20 +39,73 @@
|
||||
(map datom->vec (d/datoms db :eavt eid)))
|
||||
eids)]
|
||||
{:page (entity-util/entity->map page-entity)
|
||||
:page-id (:db/id page-entity)
|
||||
:page-uuid (:block/uuid page-entity)
|
||||
:block-count (count blocks)
|
||||
:schema-version (db-schema/schema-version->string db-schema/version)
|
||||
:datoms (vec datoms)}))
|
||||
|
||||
(defn- <sha256-hex
|
||||
[text]
|
||||
(p/let [encoder (js/TextEncoder.)
|
||||
data (.encode encoder text)
|
||||
digest (.digest (.-subtle js/crypto) "SHA-256" data)
|
||||
bytes (js/Uint8Array. digest)]
|
||||
(->> bytes
|
||||
(map (fn [b]
|
||||
(.padStart (.toString b 16) 2 "0")))
|
||||
(apply str))))
|
||||
|
||||
(defn- publish-endpoint
|
||||
[]
|
||||
(str config/PUBLISH-API-BASE "/pages"))
|
||||
|
||||
(defn- <post-publish!
|
||||
[payload]
|
||||
(let [token (state/get-auth-id-token)
|
||||
headers (cond-> {"content-type" "application/transit+json"}
|
||||
token (assoc "authorization" (str "Bearer " token)))]
|
||||
(p/let [body (ldb/write-transit-str payload)
|
||||
content-hash (<sha256-hex body)
|
||||
graph-uuid (some-> (ldb/get-graph-rtc-uuid (db/get-db)) str)
|
||||
_ (when-not graph-uuid
|
||||
(throw (ex-info "Missing graph UUID" {:repo (state/get-current-repo)})))
|
||||
publish-graph graph-uuid
|
||||
publish-meta {:page-uuid (:page-uuid payload)
|
||||
:block-count (:block-count payload)
|
||||
:schema-version (:schema-version payload)
|
||||
:publish/format :transit
|
||||
:publish/compression :none
|
||||
:publish/content-hash content-hash
|
||||
:publish/content-length (count body)
|
||||
:publish/graph publish-graph
|
||||
:publish/created-at (util/time-ms)}
|
||||
publish-body (assoc payload
|
||||
:publish/meta publish-meta)
|
||||
headers (assoc headers "x-publish-meta" (js/JSON.stringify (clj->js publish-meta)))
|
||||
resp (js/fetch (publish-endpoint)
|
||||
(clj->js {:method "POST"
|
||||
:headers headers
|
||||
:body (ldb/write-transit-str publish-body)}))]
|
||||
(if (.-ok resp)
|
||||
resp
|
||||
(p/let [body (.text resp)]
|
||||
(throw (ex-info "Publish failed"
|
||||
{:status (.-status resp)
|
||||
:body body})))))))
|
||||
|
||||
(defn publish-page!
|
||||
"Prepares the publish payload for a page. The upload step is stubbed for now."
|
||||
"Prepares and uploads the publish payload for a page."
|
||||
[page]
|
||||
(let [repo (state/get-current-repo)]
|
||||
(if-let [db* (and repo (db/get-db repo))]
|
||||
(if (and page (:db/id page))
|
||||
(let [payload (build-page-publish-datoms db* page)]
|
||||
(notification/show! "Publish payload prepared." :success)
|
||||
(js/console.log "Publish payload" (clj->js payload))
|
||||
payload)
|
||||
(notification/show! "Publishing page..." :success)
|
||||
(-> (<post-publish! payload)
|
||||
(p/then (fn [_resp]
|
||||
(notification/show! "Page published." :success)))
|
||||
(p/catch (fn [error]
|
||||
(js/console.error error)
|
||||
(notification/show! "Publish failed." :error)))))
|
||||
(notification/show! "Publish failed: invalid page." :error))
|
||||
(notification/show! "Publish failed: missing database." :error))))
|
||||
|
||||
96
src/main/frontend/publish/client.cljs
Normal file
96
src/main/frontend/publish/client.cljs
Normal file
@@ -0,0 +1,96 @@
|
||||
(ns frontend.publish.client
|
||||
"Client helpers for published page snapshots."
|
||||
(:require [frontend.config :as config]
|
||||
[frontend.state :as state]
|
||||
[logseq.db :as ldb]
|
||||
[promesa.core :as p]))
|
||||
|
||||
(defn- <fetch-json
|
||||
[url headers]
|
||||
(p/let [resp (js/fetch url (clj->js {:headers headers}))]
|
||||
(cond
|
||||
(= 304 (.-status resp)) {:status 304}
|
||||
(.-ok resp) (p/let [data (.json resp)] {:status 200 :data data})
|
||||
:else (p/let [body (.text resp)]
|
||||
(throw (ex-info "Publish fetch failed" {:status (.-status resp) :body body}))))))
|
||||
|
||||
(defn- cache-key
|
||||
[page-uuid]
|
||||
(str "publish/" page-uuid))
|
||||
|
||||
(defn- get-cache
|
||||
[page-uuid]
|
||||
(when-let [raw (js/localStorage.getItem (cache-key page-uuid))]
|
||||
(try
|
||||
(js/JSON.parse raw)
|
||||
(catch :default _e nil))))
|
||||
|
||||
(defn- set-cache!
|
||||
[page-uuid value]
|
||||
(js/localStorage.setItem (cache-key page-uuid)
|
||||
(js/JSON.stringify (clj->js value))))
|
||||
|
||||
(defn <get-page-meta
|
||||
"Fetch metadata for a published page, honoring ETag if cached.
|
||||
|
||||
Returns {:status 200 :data <meta>} or {:status 304}.
|
||||
"
|
||||
[page-uuid]
|
||||
(let [cached (get-cache page-uuid)
|
||||
headers (cond-> {}
|
||||
(and cached (.-etag cached))
|
||||
(assoc "if-none-match" (.-etag cached)))]
|
||||
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" page-uuid)
|
||||
headers)]
|
||||
(if (= 304 (:status resp))
|
||||
resp
|
||||
(let [meta (js->clj (:data resp) :keywordize-keys true)
|
||||
etag (get meta :publish/content-hash)]
|
||||
(set-cache! page-uuid {:etag etag :meta meta})
|
||||
{:status 200 :data meta}))))
|
||||
|
||||
(defn <get-transit-url
|
||||
"Fetch a signed transit URL. Uses meta ETag caching if provided.
|
||||
|
||||
Returns {:status 200 :data {:url ... :etag ...}} or {:status 304}.
|
||||
"
|
||||
[page-uuid]
|
||||
(let [cached (get-cache page-uuid)
|
||||
headers (cond-> {}
|
||||
(and cached (.-etag cached))
|
||||
(assoc "if-none-match" (.-etag cached)))]
|
||||
(p/let [resp (<fetch-json (str config/PUBLISH-API-BASE "/pages/" page-uuid
|
||||
"/transit")
|
||||
headers)]
|
||||
(if (= 304 (:status resp))
|
||||
resp
|
||||
(let [data (js->clj (:data resp) :keywordize-keys true)]
|
||||
{:status 200 :data data}))))
|
||||
|
||||
(defn <get-published-transit
|
||||
"Fetch the published transit blob and return its text body.
|
||||
|
||||
If the metadata is unchanged, returns {:status 304}.
|
||||
"
|
||||
[page-uuid]
|
||||
(p/let [meta-resp (<get-page-meta page-uuid)]
|
||||
(if (= 304 (:status meta-resp))
|
||||
meta-resp
|
||||
(p/let [url-resp (<get-transit-url page-uuid)]
|
||||
(if (= 304 (:status url-resp))
|
||||
url-resp
|
||||
(let [url (get-in url-resp [:data :url])]
|
||||
(p/let [resp (js/fetch url)]
|
||||
(if (.-ok resp)
|
||||
(p/let [text (.text resp)]
|
||||
{:status 200
|
||||
:etag (get-in url-resp [:data :etag])
|
||||
:body text})
|
||||
(p/let [body (.text resp)]
|
||||
(throw (ex-info "Publish transit fetch failed"
|
||||
{:status (.-status resp) :body body})))))))))))
|
||||
|
||||
(defn get-graph-uuid
|
||||
"Returns the RTC graph UUID if available."
|
||||
[]
|
||||
(some-> (ldb/get-graph-rtc-uuid (state/get-current-repo)) str))
|
||||
Reference in New Issue
Block a user