feat: kanban, calendar and update ops

This commit is contained in:
mertmit
2025-10-18 14:35:36 +00:00
parent 3be259816e
commit c592e5afb9
3 changed files with 735 additions and 641 deletions

View File

@@ -2039,6 +2039,320 @@ export function useInfiniteData(args: {
// Handle returned by $ncSocket.onMessage for the row data-event subscription
// (stored so the listener can be torn down/replaced when meta changes)
const activeDataListener = ref<string | null>(null)
// Handle returned by $ncSocket.onMessage for the comment-event subscription
const activeCommentListener = ref<string | null>(null)
/**
 * Applies a realtime socket data event to the local infinite-scroll row
 * cache so the view stays in sync without a server refetch.
 *
 * Actions handled:
 * - 'add':     insert `payload` before the cached row whose pk matches
 *              `before`, otherwise append at the end.
 * - 'update':  merge `payload` into the cached row matching `id`; if the
 *              row is not cached yet, re-dispatch the event as an 'add'.
 * - 'delete':  drop the cached row matching `id` and compact the indices
 *              of every row after it.
 * - 'reorder': move the cached row matching `id` in front of the `before`
 *              row (or to the end when `before` is absent), invalidating
 *              the affected chunks.
 *
 * Rows that fail the currently-active view filters are never inserted on
 * 'add'; on 'update' they stay cached but are flagged through
 * `rowMeta.isValidationFailed`.
 */
const handleDataEvent = (data: DataPayload) => {
  const { id, action, payload, before } = data
  if (action === 'add') {
    // Add the new row to the local cache (cachedRows)
    try {
      const dataCache = getDataCache()
      // A row that does not satisfy the active filters must not appear in
      // this view, so it is never inserted into the cache.
      const isValidationFailed = !validateRowFilters(
        [...allFilters.value, ...computedWhereFilter.value],
        payload,
        meta.value?.columns as ColumnType[],
        getBaseType(viewMeta.value?.view?.source_id),
        metas.value,
        {
          currentUser: user.value,
          timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
        },
      )
      // find index to insert the new row
      if (before) {
        for (const [rowIndex, cachedRow] of dataCache.cachedRows.value.entries()) {
          const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
          if (pk && `${pk}` === `${before}`) {
            // Insert before the found row.
            // BUGFIX: shift only when the row will actually be inserted —
            // previously a filtered-out row still shifted every following row
            // by one, leaving a permanent hole at newRowIndex.
            if (!isValidationFailed) {
              const newRowIndex = rowIndex
              // Use descending order so that we first open the position then place the new record
              const rowsToShift = Array.from(dataCache.cachedRows.value.entries())
                .filter(([index]) => index >= newRowIndex)
                .sort((a, b) => b[0] - a[0])
              for (const [index, rowData] of rowsToShift) {
                rowData.rowMeta.rowIndex = index + 1
                dataCache.cachedRows.value.delete(index)
                dataCache.cachedRows.value.set(index + 1, rowData)
              }
              dataCache.cachedRows.value.set(newRowIndex, {
                row: payload,
                oldRow: {},
                rowMeta: { new: false, rowIndex: newRowIndex, path: [] },
              })
              dataCache.totalRows.value++
              dataCache.actualTotalRows.value = Math.max(dataCache.actualTotalRows.value || 0, dataCache.totalRows.value)
              callbacks?.syncVisibleData?.()
            }
            // The `before` anchor was found — whether or not we inserted,
            // there is nothing left to do for this event.
            return
          }
        }
      }
      if (!isValidationFailed) {
        // No `before` anchor (or it is not cached): append to the end
        const newRowIndex = dataCache.totalRows.value
        dataCache.cachedRows.value.set(newRowIndex, {
          row: payload,
          oldRow: {},
          rowMeta: { new: false, rowIndex: newRowIndex, path: [] },
        })
        dataCache.totalRows.value++
        dataCache.actualTotalRows.value = Math.max(dataCache.actualTotalRows.value || 0, dataCache.totalRows.value)
        callbacks?.syncVisibleData?.()
      }
    } catch (e) {
      console.error('Failed to add cached row on socket event', e)
    }
  } else if (action === 'update') {
    // Update the row in the local cache (cachedRows)
    try {
      const dataCache = getDataCache()
      let updated = false
      for (const cachedRow of dataCache.cachedRows.value.values()) {
        const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
        if (pk && `${pk}` === `${id}`) {
          // Merge into both row and oldRow so the row is not considered dirty
          Object.assign(cachedRow.row, payload)
          Object.assign(cachedRow.oldRow, payload)
          // Re-evaluate filters: the update may have moved the row in or out
          // of the current view's filter set.
          const isValidationFailed = !validateRowFilters(
            [...allFilters.value, ...computedWhereFilter.value],
            payload,
            meta.value?.columns as ColumnType[],
            getBaseType(viewMeta.value?.view?.source_id),
            metas.value,
            {
              currentUser: user.value,
              timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
            },
          )
          cachedRow.rowMeta.isValidationFailed = isValidationFailed
          cachedRow.rowMeta.changed = false
          updated = true
          break
        }
      }
      if (updated) {
        callbacks?.syncVisibleData?.()
      } else {
        // Row not cached yet — treat the event as an insert.
        handleDataEvent({
          ...data,
          action: 'add',
        })
      }
    } catch (e) {
      console.error('Failed to update cached row on socket event', e)
    }
  } else if (action === 'delete') {
    // Delete the row from the local cache (cachedRows)
    try {
      const dataCache = getDataCache()
      for (const [rowIndex, cachedRow] of dataCache.cachedRows.value.entries()) {
        const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
        if (pk && `${pk}` === `${id}`) {
          dataCache.cachedRows.value.delete(rowIndex)
          // Compact: pull every row after the deleted one up by a single slot
          const rows = Array.from(dataCache.cachedRows.value.entries())
          const rowsToShift = rows.filter(([index]) => index > rowIndex)
          rowsToShift.sort((a, b) => a[0] - b[0])
          for (const [index, row] of rowsToShift) {
            const newIndex = index - 1
            row.rowMeta.rowIndex = newIndex
            dataCache.cachedRows.value.delete(index)
            dataCache.cachedRows.value.set(newIndex, row)
          }
          if (rowsToShift.length) {
            // The chunk holding the last shifted row may now be partial
            dataCache.chunkStates.value[getChunkIndex(rowsToShift[rowsToShift.length - 1][0])] = undefined
          }
          dataCache.totalRows.value = (dataCache.totalRows.value || 0) - 1
          dataCache.actualTotalRows.value = Math.max(0, (dataCache.actualTotalRows.value || 0) - 1)
          // BUGFIX: pk is unique — stop scanning instead of continuing to
          // iterate a Map whose keys we just deleted and re-inserted.
          break
        }
      }
      callbacks?.syncVisibleData?.()
    } catch (e) {
      console.error('Failed to delete cached row on socket event', e)
    }
  } else if (action === 'reorder') {
    // Reorder/move the row in the local cache (cachedRows)
    try {
      const dataCache = getDataCache()
      // Find the row to be moved by its primary key
      let rowToMove: Row | null = null
      let currentIndex: number | null = null
      for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
        const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
        if (pk && `${pk}` === `${id}`) {
          rowToMove = cachedRow
          currentIndex = index
          break
        }
      }
      if (!rowToMove || currentIndex === null) {
        console.warn('Row to move not found in cache:', id)
        if (before) {
          // The moved row is outside the cached window; if its destination
          // anchor IS cached, we still need to open a slot at that position.
          let beforeIndex: number | null = null
          for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
            const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
            if (pk && `${pk}` === `${before}`) {
              beforeIndex = index
              break
            }
          }
          if (beforeIndex !== null) {
            // Shift all rows from beforeIndex onwards down by 1
            const newCachedRows = new Map(dataCache.cachedRows.value.entries())
            // Get all rows that need to be shifted (from beforeIndex onwards),
            // highest index first so no shift overwrites a pending one
            const rowsToShift = Array.from(dataCache.cachedRows.value.entries())
              .filter(([index]) => index >= beforeIndex!)
              .sort((a, b) => b[0] - a[0])
            for (const [index, row] of rowsToShift) {
              const newIndex = index + 1
              row.rowMeta.rowIndex = newIndex
              newCachedRows.delete(index)
              newCachedRows.set(newIndex, row)
            }
            // Invalidate affected chunks so the opened slot gets refetched
            if (rowsToShift.length > 0) {
              const minAffectedChunk = getChunkIndex(beforeIndex!)
              const maxAffectedChunk = getChunkIndex(rowsToShift[rowsToShift.length - 1][0] + 1)
              for (let i = minAffectedChunk; i <= maxAffectedChunk; i++) {
                dataCache.chunkStates.value[i] = undefined
              }
            }
            // Apply the changes
            dataCache.cachedRows.value = newCachedRows
            callbacks?.syncVisibleData?.()
          } else {
            // Neither the row nor its anchor is cached — nothing to do
            console.log('Before row not in cache, skipping reorder operation')
          }
        } else {
          // No 'before' means "move to end", which is outside the cached
          // window anyway — no cache changes needed
          console.log('Row moved to end, no cache changes needed')
          callbacks?.syncVisibleData?.()
        }
        return
      }
      // Row found in cache — resolve the target position from `before`
      let targetIndex: number
      if (before) {
        // Find the row that should come after the moved row
        let beforeIndex: number | null = null
        for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
          const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
          if (pk && `${pk}` === `${before}`) {
            beforeIndex = index
            break
          }
        }
        if (beforeIndex !== null) {
          targetIndex = beforeIndex
        } else {
          // 'before' row not cached — move to end (cache is non-empty here,
          // it contains at least rowToMove)
          targetIndex = Math.max(...Array.from(dataCache.cachedRows.value.keys())) + 1
        }
      } else {
        // No 'before' specified — move to the end
        targetIndex = Math.max(...Array.from(dataCache.cachedRows.value.keys())) + 1
      }
      // Already in place — nothing to move
      if (currentIndex === targetIndex) {
        return
      }
      // Work on a copy so the cache swaps atomically at the end
      const newCachedRows = new Map(dataCache.cachedRows.value.entries())
      newCachedRows.delete(currentIndex)
      // Removing the row first means a downward move lands one slot earlier
      // (same convention as updateRecordOrder)
      const finalTargetIndex = targetIndex > currentIndex ? targetIndex - 1 : targetIndex
      // Shift the rows between the old and new positions by one slot
      if (finalTargetIndex < currentIndex) {
        // Moving up: shift intervening rows down
        for (let i = currentIndex - 1; i >= finalTargetIndex; i--) {
          const row = newCachedRows.get(i)
          if (row) {
            const newIndex = i + 1
            row.rowMeta.rowIndex = newIndex
            newCachedRows.delete(i)
            newCachedRows.set(newIndex, row)
          }
        }
      } else {
        // Moving down: shift intervening rows up
        for (let i = currentIndex + 1; i <= finalTargetIndex; i++) {
          const row = newCachedRows.get(i)
          if (row) {
            const newIndex = i - 1
            row.rowMeta.rowIndex = newIndex
            newCachedRows.delete(i)
            newCachedRows.set(newIndex, row)
          }
        }
      }
      // Place the moved row at its new position
      rowToMove.rowMeta.rowIndex = finalTargetIndex
      newCachedRows.set(finalTargetIndex, rowToMove)
      // A reorder event may carry field updates for the moved row
      if (payload && typeof payload === 'object') {
        Object.assign(rowToMove.row, payload)
        Object.assign(rowToMove.oldRow, payload)
      }
      rowToMove.rowMeta.changed = false
      // Invalidate every chunk between source and target positions
      const targetChunkIndex = getChunkIndex(finalTargetIndex)
      const sourceChunkIndex = getChunkIndex(currentIndex)
      for (let i = Math.min(sourceChunkIndex, targetChunkIndex); i <= Math.max(sourceChunkIndex, targetChunkIndex); i++) {
        dataCache.chunkStates.value[i] = undefined
      }
      // Apply the changes
      dataCache.cachedRows.value = newCachedRows
      callbacks?.syncVisibleData?.()
    } catch (e) {
      console.error('Failed to reorder cached row on socket event', e)
    }
  }
}
watch(
meta,
(newMeta, oldMeta) => {
@@ -2055,321 +2369,7 @@ export function useInfiniteData(args: {
activeDataListener.value = $ncSocket.onMessage(
`${EventType.DATA_EVENT}:${newMeta.fk_workspace_id}:${newMeta.base_id}:${newMeta.id}`,
(data: DataPayload) => {
const { id, action, payload, before } = data
if (action === 'add') {
// Add the new row to the local cache (cachedRows)
try {
const dataCache = getDataCache()
const isValidationFailed = !validateRowFilters(
[...allFilters.value, ...computedWhereFilter.value],
payload,
meta.value?.columns as ColumnType[],
getBaseType(viewMeta.value?.view?.source_id),
metas.value,
{
currentUser: user.value,
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
},
)
// find index to insert the new row
if (before) {
for (const [rowIndex, cachedRow] of dataCache.cachedRows.value.entries()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${before}`) {
// Insert before the found row
const newRowIndex = rowIndex
// Use descending order so that we first open the position then place the new record
const rowsToShift = Array.from(dataCache.cachedRows.value.entries())
.filter(([index]) => index >= newRowIndex)
.sort((a, b) => b[0] - a[0])
for (const [index, rowData] of rowsToShift) {
rowData.rowMeta.rowIndex = index + 1
dataCache.cachedRows.value.delete(index)
dataCache.cachedRows.value.set(index + 1, rowData)
}
if (!isValidationFailed) {
dataCache.cachedRows.value.set(newRowIndex, {
row: payload,
oldRow: {},
rowMeta: { new: false, rowIndex: newRowIndex, path: [] },
})
dataCache.totalRows.value++
dataCache.actualTotalRows.value = Math.max(
dataCache.actualTotalRows.value || 0,
dataCache.totalRows.value,
)
callbacks?.syncVisibleData?.()
}
return
}
}
}
if (!isValidationFailed) {
// If no order is found, append to the end
const newRowIndex = dataCache.totalRows.value
dataCache.cachedRows.value.set(newRowIndex, {
row: payload,
oldRow: {},
rowMeta: { new: false, rowIndex: newRowIndex, path: [] },
})
dataCache.totalRows.value++
dataCache.actualTotalRows.value = Math.max(dataCache.actualTotalRows.value || 0, dataCache.totalRows.value)
callbacks?.syncVisibleData?.()
}
} catch (e) {
console.error('Failed to add cached row on socket event', e)
}
} else if (action === 'update') {
// Update the row in the local cache (cachedRows)
try {
const dataCache = getDataCache()
let updated = false
for (const cachedRow of dataCache.cachedRows.value.values()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${id}`) {
Object.assign(cachedRow.row, payload)
Object.assign(cachedRow.oldRow, payload)
const isValidationFailed = !validateRowFilters(
[...allFilters.value, ...computedWhereFilter.value],
payload,
meta.value?.columns as ColumnType[],
getBaseType(viewMeta.value?.view?.source_id),
metas.value,
{
currentUser: user.value,
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
},
)
cachedRow.rowMeta.isValidationFailed = isValidationFailed
cachedRow.rowMeta.changed = false
updated = true
break
}
}
if (updated) {
callbacks?.syncVisibleData?.()
}
} catch (e) {
console.error('Failed to update cached row on socket event', e)
}
} else if (action === 'delete') {
// Delete the row from the local cache (cachedRows)
try {
const dataCache = getDataCache()
for (const [rowIndex, cachedRow] of dataCache.cachedRows.value.entries()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${id}`) {
dataCache.cachedRows.value.delete(rowIndex)
const rows = Array.from(dataCache.cachedRows.value.entries())
const rowsToShift = rows.filter(([index]) => index > rowIndex)
rowsToShift.sort((a, b) => a[0] - b[0])
for (const [index, row] of rowsToShift) {
const newIndex = index - 1
row.rowMeta.rowIndex = newIndex
dataCache.cachedRows.value.delete(index)
dataCache.cachedRows.value.set(newIndex, row)
}
if (rowsToShift.length) {
dataCache.chunkStates.value[getChunkIndex(rowsToShift[rowsToShift.length - 1][0])] = undefined
}
dataCache.totalRows.value = (dataCache.totalRows.value || 0) - 1
dataCache.actualTotalRows.value = Math.max(0, (dataCache.actualTotalRows.value || 0) - 1)
}
}
callbacks?.syncVisibleData?.()
} catch (e) {
console.error('Failed to delete cached row on socket event', e)
}
} else if (action === 'reorder') {
// Reorder/move the row in the local cache (cachedRows)
try {
const dataCache = getDataCache()
// Find the row to be moved by its primary key
let rowToMove: Row | null = null
let currentIndex: number | null = null
for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${id}`) {
rowToMove = cachedRow
currentIndex = index
break
}
}
if (!rowToMove || currentIndex === null) {
console.warn('Row to move not found in cache:', id)
if (before) {
// Find the 'before' row in cache
let beforeIndex: number | null = null
for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${before}`) {
beforeIndex = index
break
}
}
if (beforeIndex !== null) {
// The new row position is before an existing cached row
// We need to shift all rows from beforeIndex onwards down by 1
const newCachedRows = new Map(dataCache.cachedRows.value.entries())
// Get all rows that need to be shifted (from beforeIndex onwards)
const rowsToShift = Array.from((dataCache.cachedRows.value as Map<number, Row>).entries())
.filter(([index]) => index >= beforeIndex!)
.sort((a, b) => b[0] - a[0])
// Shift each row down by 1
for (const [index, row] of rowsToShift) {
const newIndex = index + 1
row.rowMeta.rowIndex = newIndex
newCachedRows.delete(index)
newCachedRows.set(newIndex, row)
}
// Invalidate affected chunks
if (rowsToShift.length > 0) {
const minAffectedChunk = getChunkIndex(beforeIndex!)
const maxAffectedChunk = getChunkIndex(rowsToShift[rowsToShift.length - 1][0] + 1)
for (let i = minAffectedChunk; i <= maxAffectedChunk; i++) {
dataCache.chunkStates.value[i] = undefined
}
}
// Apply the changes
dataCache.cachedRows.value = newCachedRows
callbacks?.syncVisibleData?.()
} else {
// The 'before' row is not in cache, skip
console.log('Before row not in cache, skipping reorder operation')
}
} else {
// No 'before' specified means move to end
// No changes needed to cache since it's moving to the end
console.log('Row moved to end, no cache changes needed')
callbacks?.syncVisibleData?.()
}
return
}
// Row found in cache - proceed with reordering
// Find the target position based on the 'before' parameter
let targetIndex: number
if (before) {
// Find the row that should come after the moved row
let beforeIndex: number | null = null
for (const [index, cachedRow] of dataCache.cachedRows.value.entries()) {
const pk = extractPkFromRow(cachedRow.row, meta.value?.columns as ColumnType[])
if (pk && `${pk}` === `${before}`) {
beforeIndex = index
break
}
}
if (beforeIndex !== null) {
targetIndex = beforeIndex
} else {
// If 'before' row not found in cache, move to end
targetIndex = Math.max(...Array.from(dataCache.cachedRows.value.keys())) + 1
}
} else {
// If no 'before' specified, move to the end
targetIndex = Math.max(...Array.from(dataCache.cachedRows.value.keys())) + 1
}
// If the row is already at the target position, no need to move
if (currentIndex === targetIndex) {
return
}
// Create a new cached rows map with the updated positions
const newCachedRows = new Map(dataCache.cachedRows.value.entries())
// Remove the row from its current position
newCachedRows.delete(currentIndex)
// Determine the final target index (similar to updateRecordOrder logic)
const finalTargetIndex = targetIndex > currentIndex ? targetIndex - 1 : targetIndex
// Shift rows to make space for the moved row
if (finalTargetIndex < currentIndex) {
// Moving up: shift rows down
for (let i = currentIndex - 1; i >= finalTargetIndex; i--) {
const row = newCachedRows.get(i)
if (row) {
const newIndex = i + 1
row.rowMeta.rowIndex = newIndex
newCachedRows.delete(i)
newCachedRows.set(newIndex, row)
}
}
} else {
// Moving down: shift rows up
for (let i = currentIndex + 1; i <= finalTargetIndex; i++) {
const row = newCachedRows.get(i)
if (row) {
const newIndex = i - 1
row.rowMeta.rowIndex = newIndex
newCachedRows.delete(i)
newCachedRows.set(newIndex, row)
}
}
}
// Place the moved row at its new position
rowToMove.rowMeta.rowIndex = finalTargetIndex
newCachedRows.set(finalTargetIndex, rowToMove)
// Update any changed data in the moved row (if payload contains updates)
if (payload && typeof payload === 'object') {
Object.assign(rowToMove.row, payload)
Object.assign(rowToMove.oldRow, payload)
}
rowToMove.rowMeta.changed = false
// Invalidate affected chunks
const targetChunkIndex = getChunkIndex(finalTargetIndex)
const sourceChunkIndex = getChunkIndex(currentIndex)
for (
let i = Math.min(sourceChunkIndex, targetChunkIndex);
i <= Math.max(sourceChunkIndex, targetChunkIndex);
i++
) {
dataCache.chunkStates.value[i] = undefined
}
// Apply the changes
dataCache.cachedRows.value = newCachedRows
callbacks?.syncVisibleData?.()
} catch (e) {
console.error('Failed to reorder cached row on socket event', e)
}
}
},
handleDataEvent,
)
activeCommentListener.value = $ncSocket.onMessage(