fix: Increase maxBuffer for jq processing to handle large Claude outputs (#473)

Fixes "stdout maxBuffer length exceeded" error by increasing the buffer
from Node.js default of 1MB to 10MB when processing Claude output with jq.
This prevents failures when Claude produces large execution logs.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-authored-by: Claude <noreply@anthropic.com>
Author: Ashwin Bhat
Date: 2025-08-20 20:02:00 -07:00
Committed by: GitHub
Parent: 79cee96324
Commit: 9f02f6f6d4

@@ -307,7 +307,10 @@ export async function runClaude(promptPath: string, options: ClaudeOptions) {
     await writeFile("output.txt", output);
     // Process output.txt into JSON and save to execution file
-    const { stdout: jsonOutput } = await execAsync("jq -s '.' output.txt");
+    // Increase maxBuffer from Node.js default of 1MB to 10MB to handle large Claude outputs
+    const { stdout: jsonOutput } = await execAsync("jq -s '.' output.txt", {
+      maxBuffer: 10 * 1024 * 1024,
+    });
     await writeFile(EXECUTION_FILE, jsonOutput);
     console.log(`Log saved to ${EXECUTION_FILE}`);
@@ -324,7 +327,10 @@ export async function runClaude(promptPath: string, options: ClaudeOptions) {
   if (output) {
     try {
       await writeFile("output.txt", output);
-      const { stdout: jsonOutput } = await execAsync("jq -s '.' output.txt");
+      // Increase maxBuffer from Node.js default of 1MB to 10MB to handle large Claude outputs
+      const { stdout: jsonOutput } = await execAsync("jq -s '.' output.txt", {
+        maxBuffer: 10 * 1024 * 1024,
+      });
       await writeFile(EXECUTION_FILE, jsonOutput);
       core.setOutput("execution_file", EXECUTION_FILE);
     } catch (e) {
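
For context, a minimal, self-contained sketch of the pattern the diff applies, using Node's promisified `child_process.exec`. The `collectExecutionLog` helper and the `EXECUTION_FILE` value below are illustrative placeholders, not the action's real identifiers, and `jq` must be on the PATH:

```ts
import { exec } from "node:child_process";
import { promisify } from "node:util";
import { writeFile } from "node:fs/promises";

const execAsync = promisify(exec);

// Illustrative constant; the real path is defined elsewhere in the action.
const EXECUTION_FILE = "execution-log.json";

// Slurp the newline-delimited JSON in output.txt into a single JSON array via jq.
// Raising maxBuffer lets stdout larger than the 1MB default through instead of
// throwing "stdout maxBuffer length exceeded".
async function collectExecutionLog(output: string): Promise<void> {
  await writeFile("output.txt", output);
  const { stdout: jsonOutput } = await execAsync("jq -s '.' output.txt", {
    maxBuffer: 10 * 1024 * 1024, // 10MB
  });
  await writeFile(EXECUTION_FILE, jsonOutput);
}
```

Raising maxBuffer keeps the existing one-line exec call intact; an alternative would be streaming jq's stdout with spawn, which avoids any fixed limit but requires more plumbing than this fix needs.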