Compare commits


2 Commits

Author     SHA1         Message                                                   Date

Dax        ef89682d8a   Merge branch 'dev' into migrate-json-migration            2026-02-18 17:53:44 -05:00

Dax Raad   aeffbfebba   refactor: migrate src/storage/json-migration.ts from      2026-02-18 10:45:32 -05:00
                        Bun.file() to Filesystem module

                        Replace Bun.file().json() with Filesystem.readJson():

                        - Update read() function to use Filesystem.readJson()
                          for batch JSON reading

                        All 22 storage tests pass.
2 changed files with 39 additions and 32 deletions
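
Note: the Filesystem module itself is not part of this diff. Its surface can be inferred from the call sites below — Filesystem.readJson<T>(path), Filesystem.writeJson(path, value), and Filesystem.write(path, content) — so a minimal sketch of such a module (assuming it wraps node:fs/promises; the repository's actual implementation may differ) would be:

    import { readFile, writeFile } from "node:fs/promises"

    export namespace Filesystem {
      // Read a file from disk and parse it as JSON, typed by the caller.
      export async function readJson<T>(filepath: string): Promise<T> {
        return JSON.parse(await readFile(filepath, "utf8")) as T
      }

      // Serialize a value as pretty-printed JSON and write it to disk.
      export async function writeJson(filepath: string, value: unknown): Promise<void> {
        await writeFile(filepath, JSON.stringify(value, null, 2))
      }

      // Write raw string content to disk.
      export async function write(filepath: string, content: string): Promise<void> {
        await writeFile(filepath, content)
      }
    }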

View File: src/storage/json-migration.ts

@@ -7,6 +7,7 @@ import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } fro
 import { SessionShareTable } from "../share/share.sql"
 import path from "path"
 import { existsSync } from "fs"
+import { Filesystem } from "../util/filesystem"

 export namespace JsonMigration {
   const log = Log.create({ service: "json-migration" })
@@ -82,7 +83,7 @@ export namespace JsonMigration {
       const count = end - start
       const tasks = new Array(count)
       for (let i = 0; i < count; i++) {
-        tasks[i] = Bun.file(files[start + i]).json()
+        tasks[i] = Filesystem.readJson(files[start + i])
       }
       const results = await Promise.allSettled(tasks)
       const items = new Array(count)
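
The read() change above fans out one parse task per file and gathers them with Promise.allSettled, so a single unreadable file fails only its own slot instead of rejecting the whole batch. A hypothetical continuation showing how the settled results might be folded into items (the error handling here is an assumption, not code from this diff):

    const results = await Promise.allSettled(tasks)
    const items = new Array(count)
    for (let i = 0; i < count; i++) {
      const result = results[i]
      // Keep successful parses; leave failed slots undefined so callers can skip them.
      items[i] = result.status === "fulfilled" ? result.value : undefined
    }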

View File

@@ -39,7 +39,7 @@ export namespace Storage {
         cwd: path.join(project, projectDir),
         absolute: true,
       })) {
-        const json = await Filesystem.readJson<any>(msgFile)
+        const json = await Bun.file(msgFile).json()
         worktree = json.path?.root
         if (worktree) break
       }
@@ -60,15 +60,18 @@ export namespace Storage {
       if (!id) continue
       projectID = id
-      await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
-        id,
-        vcs: "git",
-        worktree,
-        time: {
-          created: Date.now(),
-          initialized: Date.now(),
-        },
-      })
+      await Bun.write(
+        path.join(dir, "project", projectID + ".json"),
+        JSON.stringify({
+          id,
+          vcs: "git",
+          worktree,
+          time: {
+            created: Date.now(),
+            initialized: Date.now(),
+          },
+        }),
+      )
       log.info(`migrating sessions for project ${projectID}`)
       for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
@@ -80,8 +83,8 @@ export namespace Storage {
         sessionFile,
         dest,
       })
-      const session = await Filesystem.readJson<any>(sessionFile)
-      await Filesystem.writeJson(dest, session)
+      const session = await Bun.file(sessionFile).json()
+      await Bun.write(dest, JSON.stringify(session))
       log.info(`migrating messages for session ${session.id}`)
       for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
         cwd: fullProjectDir,
@@ -92,8 +95,8 @@ export namespace Storage {
         msgFile,
         dest,
       })
-      const message = await Filesystem.readJson<any>(msgFile)
-      await Filesystem.writeJson(dest, message)
+      const message = await Bun.file(msgFile).json()
+      await Bun.write(dest, JSON.stringify(message))
       log.info(`migrating parts for message ${message.id}`)
       for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
@@ -120,32 +123,35 @@ export namespace Storage {
       cwd: dir,
       absolute: true,
     })) {
-      const session = await Filesystem.readJson<any>(item)
+      const session = await Bun.file(item).json()
      if (!session.projectID) continue
      if (!session.summary?.diffs) continue
      const { diffs } = session.summary
-      await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
-      await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
-        ...session,
-        summary: {
-          additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
-          deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
-        },
-      })
+      await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs))
+      await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write(
+        JSON.stringify({
+          ...session,
+          summary: {
+            additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
+            deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
+          },
+        }),
+      )
     }
   },
 ]
 const state = lazy(async () => {
   const dir = path.join(Global.Path.data, "storage")
-  const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
+  const migration = await Bun.file(path.join(dir, "migration"))
+    .json()
+    .then((x) => parseInt(x))
+    .catch(() => 0)
   for (let index = migration; index < MIGRATIONS.length; index++) {
     log.info("running migration", { index })
     const migration = MIGRATIONS[index]
     await migration(dir).catch(() => log.error("failed to run migration", { index }))
-    await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
+    await Bun.write(path.join(dir, "migration"), (index + 1).toString())
   }
   return {
     dir,
@@ -165,7 +171,7 @@ export namespace Storage {
   const target = path.join(dir, ...key) + ".json"
   return withErrorHandling(async () => {
     using _ = await Lock.read(target)
-    const result = await Filesystem.readJson<T>(target)
+    const result = await Bun.file(target).json()
     return result as T
   })
 }
@@ -175,10 +181,10 @@ export namespace Storage {
   const target = path.join(dir, ...key) + ".json"
   return withErrorHandling(async () => {
     using _ = await Lock.write(target)
-    const content = await Filesystem.readJson<T>(target)
-    fn(content as T)
-    await Filesystem.writeJson(target, content)
-    return content
+    const content = await Bun.file(target).json()
+    fn(content)
+    await Bun.write(target, JSON.stringify(content, null, 2))
+    return content as T
   })
 }
@@ -187,7 +193,7 @@ export namespace Storage {
   const target = path.join(dir, ...key) + ".json"
   return withErrorHandling(async () => {
     using _ = await Lock.write(target)
-    await Filesystem.writeJson(target, content)
+    await Bun.write(target, JSON.stringify(content, null, 2))
   })
 }
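
Two patterns in the hunks above are worth noting. First, the runner checkpoints progress by writing index + 1 to the plain-text migration file after each step, so an interrupted process resumes at the first unfinished migration instead of replaying all of them. A self-contained restatement of that checkpoint protocol (names and shape mirror the diff; logging is simplified):

    import path from "path"

    // Checkpointed migration runner, restated from the diff above.
    async function runMigrations(dir: string, migrations: Array<(dir: string) => Promise<void>>) {
      // A missing or unparseable checkpoint file means "start from the beginning".
      const checkpoint = await Bun.file(path.join(dir, "migration"))
        .json()
        .then((x) => parseInt(x))
        .catch(() => 0)
      for (let index = checkpoint; index < migrations.length; index++) {
        // A failed migration is logged but does not block the ones after it.
        await migrations[index](dir).catch(() => console.error("failed to run migration", { index }))
        // Persist index + 1 so a restart skips everything already attempted.
        await Bun.write(path.join(dir, "migration"), (index + 1).toString())
      }
    }

Second, read(), update(), and write() all take the file lock through a using declaration (using _ = await Lock.read(target) or Lock.write(target)), which disposes the lock automatically when the enclosing scope exits, including on an early return or a thrown error.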