Mirror of https://github.com/google-gemini/gemini-cli.git (synced 2026-02-01 14:44:29 +00:00)
feat(core): Isolate and cleanup truncated tool outputs (#17594)
@@ -32,6 +32,10 @@ import {
   runExitCleanup,
   registerTelemetryConfig,
 } from './utils/cleanup.js';
+import {
+  cleanupToolOutputFiles,
+  cleanupExpiredSessions,
+} from './utils/sessionCleanup.js';
 import {
   type Config,
   type ResumedSessionData,
@@ -71,7 +75,6 @@ import {
 } from './core/initializer.js';
 import { validateAuthMethod } from './config/auth.js';
 import { runZedIntegration } from './zed-integration/zedIntegration.js';
-import { cleanupExpiredSessions } from './utils/sessionCleanup.js';
 import { validateNonInteractiveAuth } from './validateNonInterActiveAuth.js';
 import { checkForUpdates } from './ui/utils/updateCheck.js';
 import { handleAutoUpdate } from './utils/handleAutoUpdate.js';
@@ -320,7 +323,10 @@ export async function main() {
     );
   });

-  await cleanupCheckpoints();
+  await Promise.all([
+    cleanupCheckpoints(),
+    cleanupToolOutputFiles(settings.merged),
+  ]);

   const parseArgsHandle = startupProfiler.start('parse_arguments');
   const argv = await parseArguments(settings.merged);
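The change above starts checkpoint cleanup and the new tool-output cleanup concurrently instead of awaiting them one after the other. The two tasks are independent, and cleanupToolOutputFiles catches its own errors internally, so a failed cleanup cannot reject the combined promise and break startup. A minimal sketch of the pattern (function names taken from the diff, bodies illustrative):

  // Startup runs independent cleanup tasks in parallel; each task is expected
  // to swallow its own errors so Promise.all cannot reject at startup.
  async function runStartupCleanup(
    cleanupCheckpoints: () => Promise<void>,
    cleanupToolOutput: () => Promise<void>,
  ): Promise<void> {
    await Promise.all([cleanupCheckpoints(), cleanupToolOutput()]);
  }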
@@ -17,11 +17,24 @@ import { cleanupExpiredSessions } from './sessionCleanup.js';
 import { type SessionInfo, getAllSessionFiles } from './sessionUtils.js';

 // Mock the fs module
-vi.mock('fs/promises');
+vi.mock('node:fs/promises');
 vi.mock('./sessionUtils.js', () => ({
   getAllSessionFiles: vi.fn(),
 }));

+vi.mock('@google/gemini-cli-core', async (importOriginal) => {
+  const actual =
+    await importOriginal<typeof import('@google/gemini-cli-core')>();
+  return {
+    ...actual,
+    Storage: class MockStorage {
+      getProjectTempDir() {
+        return '/tmp/test-project';
+      }
+    },
+  };
+});
+
 const mockFs = vi.mocked(fs);
 const mockGetAllSessionFiles = vi.mocked(getAllSessionFiles);

@@ -6,7 +6,12 @@

 import * as fs from 'node:fs/promises';
 import * as path from 'node:path';
-import { debugLogger, type Config } from '@google/gemini-cli-core';
+import {
+  debugLogger,
+  Storage,
+  TOOL_OUTPUT_DIR,
+  type Config,
+} from '@google/gemini-cli-core';
 import type { Settings, SessionRetentionSettings } from '../config/settings.js';
 import { getAllSessionFiles, type SessionFileEntry } from './sessionUtils.js';

@@ -309,3 +314,148 @@ function validateRetentionConfig(

   return null;
 }
+
+/**
+ * Result of tool output cleanup operation
+ */
+export interface ToolOutputCleanupResult {
+  disabled: boolean;
+  scanned: number;
+  deleted: number;
+  failed: number;
+}
+
+/**
+ * Cleans up tool output files based on age and count limits.
+ * Uses the same retention settings as session cleanup.
+ */
+export async function cleanupToolOutputFiles(
+  settings: Settings,
+  debugMode: boolean = false,
+  projectTempDir?: string,
+): Promise<ToolOutputCleanupResult> {
+  const result: ToolOutputCleanupResult = {
+    disabled: false,
+    scanned: 0,
+    deleted: 0,
+    failed: 0,
+  };
+
+  try {
+    // Early exit if cleanup is disabled
+    if (!settings.general?.sessionRetention?.enabled) {
+      return { ...result, disabled: true };
+    }
+
+    const retentionConfig = settings.general.sessionRetention;
+    const tempDir =
+      projectTempDir ?? new Storage(process.cwd()).getProjectTempDir();
+    const toolOutputDir = path.join(tempDir, TOOL_OUTPUT_DIR);
+
+    // Check if directory exists
+    try {
+      await fs.access(toolOutputDir);
+    } catch {
+      // Directory doesn't exist, nothing to clean up
+      return result;
+    }
+
+    // Get all files in the tool_output directory
+    const entries = await fs.readdir(toolOutputDir, { withFileTypes: true });
+    const files = entries.filter((e) => e.isFile());
+    result.scanned = files.length;
+
+    if (files.length === 0) {
+      return result;
+    }
+
+    // Get file stats for age-based cleanup (parallel for better performance)
+    const fileStatsResults = await Promise.all(
+      files.map(async (file) => {
+        try {
+          const filePath = path.join(toolOutputDir, file.name);
+          const stat = await fs.stat(filePath);
+          return { name: file.name, mtime: stat.mtime };
+        } catch (error) {
+          debugLogger.debug(
+            `Failed to stat file ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`,
+          );
+          return null;
+        }
+      }),
+    );
+    const fileStats = fileStatsResults.filter(
+      (f): f is { name: string; mtime: Date } => f !== null,
+    );
+
+    // Sort by mtime (oldest first)
+    fileStats.sort((a, b) => a.mtime.getTime() - b.mtime.getTime());
+
+    const now = new Date();
+    const filesToDelete: string[] = [];
+
+    // Age-based cleanup: delete files older than maxAge
+    if (retentionConfig.maxAge) {
+      try {
+        const maxAgeMs = parseRetentionPeriod(retentionConfig.maxAge);
+        const cutoffDate = new Date(now.getTime() - maxAgeMs);
+
+        for (const file of fileStats) {
+          if (file.mtime < cutoffDate) {
+            filesToDelete.push(file.name);
+          }
+        }
+      } catch (error) {
+        debugLogger.debug(
+          `Invalid maxAge format, skipping age-based cleanup: ${error instanceof Error ? error.message : 'Unknown error'}`,
+        );
+      }
+    }
+
+    // Count-based cleanup: after age-based cleanup, if we still have more files
+    // than maxCount, delete the oldest ones to bring the count down.
+    // This ensures we keep at most maxCount files, preferring newer ones.
+    if (retentionConfig.maxCount !== undefined) {
+      // Filter out files already marked for deletion by age-based cleanup
+      const remainingFiles = fileStats.filter(
+        (f) => !filesToDelete.includes(f.name),
+      );
+      if (remainingFiles.length > retentionConfig.maxCount) {
+        // Calculate how many excess files need to be deleted
+        const excessCount = remainingFiles.length - retentionConfig.maxCount;
+        // remainingFiles is already sorted oldest first, so delete from the start
+        for (let i = 0; i < excessCount; i++) {
+          filesToDelete.push(remainingFiles[i].name);
+        }
+      }
+    }
+
+    // Delete the files
+    for (const fileName of filesToDelete) {
+      try {
+        const filePath = path.join(toolOutputDir, fileName);
+        await fs.unlink(filePath);
+        result.deleted++;
+      } catch (error) {
+        debugLogger.debug(
+          `Failed to delete file ${fileName}: ${error instanceof Error ? error.message : 'Unknown error'}`,
+        );
+        result.failed++;
+      }
+    }
+
+    if (debugMode && result.deleted > 0) {
+      debugLogger.debug(
+        `Tool output cleanup: deleted ${result.deleted}, failed ${result.failed}`,
+      );
+    }
+  } catch (error) {
+    // Global error handler - don't let cleanup failures break startup
+    const errorMessage =
+      error instanceof Error ? error.message : 'Unknown error';
+    debugLogger.warn(`Tool output cleanup failed: ${errorMessage}`);
+    result.failed++;
+  }
+
+  return result;
+}
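A minimal usage sketch for the new helper, assuming the Settings shape exercised by the tests below (sessionRetention nested under general); the counters on the returned object correspond to the ToolOutputCleanupResult fields above:

  // Sketch: run tool-output cleanup with an explicit retention policy.
  // Call from an async context; the third argument is optional and is
  // mainly useful for pointing tests at a throwaway temp directory.
  import { cleanupToolOutputFiles } from './utils/sessionCleanup.js';

  const result = await cleanupToolOutputFiles(
    { general: { sessionRetention: { enabled: true, maxAge: '7d', maxCount: 100 } } },
    /* debugMode */ true,
  );
  console.log(`scanned=${result.scanned}, deleted=${result.deleted}, failed=${result.failed}`);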
packages/cli/src/utils/toolOutputCleanup.test.ts (new file, 285 lines)
@@ -0,0 +1,285 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import * as os from 'node:os';
import { debugLogger, TOOL_OUTPUT_DIR } from '@google/gemini-cli-core';
import type { Settings } from '../config/settings.js';
import { cleanupToolOutputFiles } from './sessionCleanup.js';

describe('Tool Output Cleanup', () => {
  let testTempDir: string;

  beforeEach(async () => {
    // Create a unique temp directory for each test
    testTempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'tool-output-test-'));
    vi.spyOn(debugLogger, 'error').mockImplementation(() => {});
    vi.spyOn(debugLogger, 'warn').mockImplementation(() => {});
    vi.spyOn(debugLogger, 'debug').mockImplementation(() => {});
  });

  afterEach(async () => {
    vi.restoreAllMocks();
    // Clean up the temp directory
    try {
      await fs.rm(testTempDir, { recursive: true, force: true });
    } catch {
      // Ignore cleanup errors
    }
  });

  describe('cleanupToolOutputFiles', () => {
    it('should return early when cleanup is disabled', async () => {
      const settings: Settings = {
        general: { sessionRetention: { enabled: false } },
      };

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(true);
      expect(result.scanned).toBe(0);
      expect(result.deleted).toBe(0);
      expect(result.failed).toBe(0);
    });

    it('should return early when sessionRetention is not configured', async () => {
      const settings: Settings = {};

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(true);
      expect(result.scanned).toBe(0);
      expect(result.deleted).toBe(0);
    });

    it('should return early when tool_output directory does not exist', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxAge: '7d',
          },
        },
      };

      // Don't create the tool_output directory
      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(false);
      expect(result.scanned).toBe(0);
      expect(result.deleted).toBe(0);
      expect(result.failed).toBe(0);
    });

    it('should delete files older than maxAge', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxAge: '7d',
          },
        },
      };

      // Create tool_output directory and files
      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
      await fs.mkdir(toolOutputDir, { recursive: true });

      const now = Date.now();
      const fiveDaysAgo = now - 5 * 24 * 60 * 60 * 1000;
      const tenDaysAgo = now - 10 * 24 * 60 * 60 * 1000;

      // Create files with different ages
      const recentFile = path.join(toolOutputDir, 'shell_recent.txt');
      const oldFile = path.join(toolOutputDir, 'shell_old.txt');

      await fs.writeFile(recentFile, 'recent content');
      await fs.writeFile(oldFile, 'old content');

      // Set file modification times
      await fs.utimes(recentFile, fiveDaysAgo / 1000, fiveDaysAgo / 1000);
      await fs.utimes(oldFile, tenDaysAgo / 1000, tenDaysAgo / 1000);

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(false);
      expect(result.scanned).toBe(2);
      expect(result.deleted).toBe(1); // Only the 10-day-old file should be deleted
      expect(result.failed).toBe(0);

      // Verify the old file was deleted and recent file remains
      const remainingFiles = await fs.readdir(toolOutputDir);
      expect(remainingFiles).toContain('shell_recent.txt');
      expect(remainingFiles).not.toContain('shell_old.txt');
    });

    it('should delete oldest files when exceeding maxCount', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxCount: 2,
          },
        },
      };

      // Create tool_output directory and files
      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
      await fs.mkdir(toolOutputDir, { recursive: true });

      const now = Date.now();
      const oneDayAgo = now - 1 * 24 * 60 * 60 * 1000;
      const twoDaysAgo = now - 2 * 24 * 60 * 60 * 1000;
      const threeDaysAgo = now - 3 * 24 * 60 * 60 * 1000;

      // Create 3 files with different ages
      const file1 = path.join(toolOutputDir, 'shell_1.txt');
      const file2 = path.join(toolOutputDir, 'shell_2.txt');
      const file3 = path.join(toolOutputDir, 'shell_3.txt');

      await fs.writeFile(file1, 'content 1');
      await fs.writeFile(file2, 'content 2');
      await fs.writeFile(file3, 'content 3');

      // Set file modification times (file3 is oldest)
      await fs.utimes(file1, oneDayAgo / 1000, oneDayAgo / 1000);
      await fs.utimes(file2, twoDaysAgo / 1000, twoDaysAgo / 1000);
      await fs.utimes(file3, threeDaysAgo / 1000, threeDaysAgo / 1000);

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(false);
      expect(result.scanned).toBe(3);
      expect(result.deleted).toBe(1); // Should delete 1 file to get down to maxCount of 2
      expect(result.failed).toBe(0);

      // Verify the oldest file was deleted
      const remainingFiles = await fs.readdir(toolOutputDir);
      expect(remainingFiles).toHaveLength(2);
      expect(remainingFiles).not.toContain('shell_3.txt');
    });

    it('should handle empty directory', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxAge: '7d',
          },
        },
      };

      // Create empty tool_output directory
      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
      await fs.mkdir(toolOutputDir, { recursive: true });

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(false);
      expect(result.scanned).toBe(0);
      expect(result.deleted).toBe(0);
      expect(result.failed).toBe(0);
    });

    it('should apply both maxAge and maxCount together', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxAge: '3d',
            maxCount: 2,
          },
        },
      };

      // Create tool_output directory and files
      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
      await fs.mkdir(toolOutputDir, { recursive: true });

      const now = Date.now();
      const oneDayAgo = now - 1 * 24 * 60 * 60 * 1000;
      const twoDaysAgo = now - 2 * 24 * 60 * 60 * 1000;
      const twoAndHalfDaysAgo = now - 2.5 * 24 * 60 * 60 * 1000;
      const fiveDaysAgo = now - 5 * 24 * 60 * 60 * 1000;
      const tenDaysAgo = now - 10 * 24 * 60 * 60 * 1000;

      // Create 5 files with different ages
      const file1 = path.join(toolOutputDir, 'shell_1.txt'); // 1 day old - keep
      const file2 = path.join(toolOutputDir, 'shell_2.txt'); // 2 days old - keep
      const file3 = path.join(toolOutputDir, 'shell_3.txt'); // 2.5 days old - delete by count
      const file4 = path.join(toolOutputDir, 'shell_4.txt'); // 5 days old - delete by age
      const file5 = path.join(toolOutputDir, 'shell_5.txt'); // 10 days old - delete by age

      await fs.writeFile(file1, 'content 1');
      await fs.writeFile(file2, 'content 2');
      await fs.writeFile(file3, 'content 3');
      await fs.writeFile(file4, 'content 4');
      await fs.writeFile(file5, 'content 5');

      // Set file modification times
      await fs.utimes(file1, oneDayAgo / 1000, oneDayAgo / 1000);
      await fs.utimes(file2, twoDaysAgo / 1000, twoDaysAgo / 1000);
      await fs.utimes(
        file3,
        twoAndHalfDaysAgo / 1000,
        twoAndHalfDaysAgo / 1000,
      );
      await fs.utimes(file4, fiveDaysAgo / 1000, fiveDaysAgo / 1000);
      await fs.utimes(file5, tenDaysAgo / 1000, tenDaysAgo / 1000);

      const result = await cleanupToolOutputFiles(settings, false, testTempDir);

      expect(result.disabled).toBe(false);
      expect(result.scanned).toBe(5);
      // file4 and file5 deleted by maxAge, file3 deleted by maxCount
      expect(result.deleted).toBe(3);
      expect(result.failed).toBe(0);

      // Verify only the 2 newest files remain
      const remainingFiles = await fs.readdir(toolOutputDir);
      expect(remainingFiles).toHaveLength(2);
      expect(remainingFiles).toContain('shell_1.txt');
      expect(remainingFiles).toContain('shell_2.txt');
      expect(remainingFiles).not.toContain('shell_3.txt');
      expect(remainingFiles).not.toContain('shell_4.txt');
      expect(remainingFiles).not.toContain('shell_5.txt');
    });

    it('should log debug information when enabled', async () => {
      const settings: Settings = {
        general: {
          sessionRetention: {
            enabled: true,
            maxAge: '1d',
          },
        },
      };

      // Create tool_output directory and an old file
      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
      await fs.mkdir(toolOutputDir, { recursive: true });

      const tenDaysAgo = Date.now() - 10 * 24 * 60 * 60 * 1000;
      const oldFile = path.join(toolOutputDir, 'shell_old.txt');
      await fs.writeFile(oldFile, 'old content');
      await fs.utimes(oldFile, tenDaysAgo / 1000, tenDaysAgo / 1000);

      const debugSpy = vi
        .spyOn(debugLogger, 'debug')
        .mockImplementation(() => {});

      await cleanupToolOutputFiles(settings, true, testTempDir);

      expect(debugSpy).toHaveBeenCalledWith(
        expect.stringContaining('Tool output cleanup: deleted'),
      );

      debugSpy.mockRestore();
    });
  });
});
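These tests exercise the cleanup against the real filesystem and simply back-date files with fs.utimes rather than mocking fs. A standalone sketch of that trick (path illustrative; utimes takes atime/mtime in seconds, hence the division by 1000):

  import * as fs from 'node:fs/promises';

  // Make a freshly written file look ten days old so age-based cleanup
  // will treat it as expired.
  const tenDaysAgoSec = (Date.now() - 10 * 24 * 60 * 60 * 1000) / 1000;
  await fs.writeFile('/tmp/example/shell_old.txt', 'old content');
  await fs.utimes('/tmp/example/shell_old.txt', tenDaysAgoSec, tenDaysAgoSec);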
@@ -16,6 +16,7 @@ import type { BaseLlmClient } from '../core/baseLlmClient.js';
 import type { GeminiChat } from '../core/geminiChat.js';
 import type { Config } from '../config/config.js';
 import * as fileUtils from '../utils/fileUtils.js';
+import { TOOL_OUTPUT_DIR } from '../utils/fileUtils.js';
 import { getInitialChatHistory } from '../utils/environmentContext.js';
 import * as tokenCalculation from '../utils/tokenCalculation.js';
 import { tokenLimit } from '../core/tokenLimits.js';
@@ -510,8 +511,9 @@ describe('ChatCompressionService', () => {
         'Output too large.',
       );

-      // Verify a file was actually created
-      const files = fs.readdirSync(testTempDir);
+      // Verify a file was actually created in the tool_output subdirectory
+      const toolOutputDir = path.join(testTempDir, TOOL_OUTPUT_DIR);
+      const files = fs.readdirSync(toolOutputDir);
       expect(files.length).toBeGreaterThan(0);
       expect(files[0]).toMatch(/grep_.*\.txt/);
     });
@@ -1076,7 +1076,11 @@ describe('fileUtils', () => {
         tempRootDir,
       );

-      const expectedOutputFile = path.join(tempRootDir, 'shell_123.txt');
+      const expectedOutputFile = path.join(
+        tempRootDir,
+        'tool_output',
+        'shell_123.txt',
+      );
       expect(result.outputFile).toBe(expectedOutputFile);
       expect(result.totalLines).toBe(1);

@@ -1102,6 +1106,7 @@ describe('fileUtils', () => {
       // ../../dangerous/tool -> ______dangerous_tool
       const expectedOutputFile = path.join(
         tempRootDir,
+        'tool_output',
         '______dangerous_tool_1.txt',
       );
       expect(result.outputFile).toBe(expectedOutputFile);
@@ -1122,6 +1127,7 @@ describe('fileUtils', () => {
       // ../../etc/passwd -> ______etc_passwd
       const expectedOutputFile = path.join(
         tempRootDir,
+        'tool_output',
         'shell_______etc_passwd.txt',
       );
       expect(result.outputFile).toBe(expectedOutputFile);
@@ -566,6 +566,8 @@ ${processedLines.join('\n')}`;
 /**
  * Saves tool output to a temporary file for later retrieval.
  */
+export const TOOL_OUTPUT_DIR = 'tool_output';
+
 export async function saveTruncatedToolOutput(
   content: string,
   toolName: string,
@@ -578,8 +580,10 @@ export async function saveTruncatedToolOutput(
     .replace(/[^a-z0-9]/gi, '_')
     .toLowerCase();
   const fileName = `${safeToolName}_${safeId}.txt`;
-  const outputFile = path.join(projectTempDir, fileName);
+  const toolOutputDir = path.join(projectTempDir, TOOL_OUTPUT_DIR);
+  const outputFile = path.join(toolOutputDir, fileName);

+  await fsPromises.mkdir(toolOutputDir, { recursive: true });
   await fsPromises.writeFile(outputFile, content);

   const lines = content.split('\n');
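After this change, truncated tool output lands in a tool_output subdirectory of the project temp directory rather than in its root, and the directory is created on demand. A small sketch of the resulting path construction, using the sanitization shown in the diff (temp path illustrative):

  import * as path from 'node:path';

  const TOOL_OUTPUT_DIR = 'tool_output';
  const toolName = '../../dangerous/tool';
  // Non-alphanumeric characters become underscores, matching the test above
  const safeToolName = toolName.replace(/[^a-z0-9]/gi, '_').toLowerCase(); // '______dangerous_tool'
  const outputFile = path.join('/tmp/my-project', TOOL_OUTPUT_DIR, `${safeToolName}_1.txt`);
  // -> /tmp/my-project/tool_output/______dangerous_tool_1.txt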