Mirror of https://github.com/anthropics/claude-code-action.git (synced 2026-01-23 23:14:13 +08:00)

Compare commits: boris/add-... vs. v1.0.29 (12 commits)
Commits in this comparison:

- 1b8ee3b941
- c247cb152d
- cefa60067a
- 7a708f68fa
- 5da7ba548c
- 964b8355fb
- c83d67a9b9
- c9ec2b02b4
- 63ea7e3174
- 653f9cd7a3
- b17b541bbc
- 7e4bf87b1c
**action.yml** (21 changed lines)
```diff
@@ -23,6 +23,10 @@ inputs:
     description: "The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format)"
     required: false
     default: "claude/"
+  branch_name_template:
+    description: "Template for branch naming. Available variables: {{prefix}}, {{entityType}}, {{entityNumber}}, {{timestamp}}, {{sha}}, {{label}}, {{description}}. {{label}} will be first label from the issue/PR, or {{entityType}} as a fallback. {{description}} will be the first 5 words of the issue/PR title in kebab-case. Default: '{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}'"
+    required: false
+    default: ""
   allowed_bots:
     description: "Comma-separated list of allowed bot usernames, or '*' to allow all bots. Empty string (default) allows no bots."
     required: false
@@ -81,6 +85,10 @@ inputs:
     description: "Enable commit signing using GitHub's commit signature verification. When false, Claude uses standard git commands"
     required: false
     default: "false"
+  ssh_signing_key:
+    description: "SSH private key for signing commits. When provided, git will be configured to use SSH signing. Takes precedence over use_commit_signing."
+    required: false
+    default: ""
   bot_id:
     description: "GitHub user ID to use for git operations (defaults to Claude's bot ID)"
     required: false
@@ -174,6 +182,7 @@ runs:
         LABEL_TRIGGER: ${{ inputs.label_trigger }}
         BASE_BRANCH: ${{ inputs.base_branch }}
         BRANCH_PREFIX: ${{ inputs.branch_prefix }}
+        BRANCH_NAME_TEMPLATE: ${{ inputs.branch_name_template }}
         OVERRIDE_GITHUB_TOKEN: ${{ inputs.github_token }}
         ALLOWED_BOTS: ${{ inputs.allowed_bots }}
         ALLOWED_NON_WRITE_USERS: ${{ inputs.allowed_non_write_users }}
@@ -181,6 +190,7 @@ runs:
         USE_STICKY_COMMENT: ${{ inputs.use_sticky_comment }}
         DEFAULT_WORKFLOW_TOKEN: ${{ github.token }}
         USE_COMMIT_SIGNING: ${{ inputs.use_commit_signing }}
+        SSH_SIGNING_KEY: ${{ inputs.ssh_signing_key }}
        BOT_ID: ${{ inputs.bot_id }}
        BOT_NAME: ${{ inputs.bot_name }}
        TRACK_PROGRESS: ${{ inputs.track_progress }}
@@ -203,12 +213,13 @@ runs:
 
         # Install Claude Code if no custom executable is provided
         if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
-          CLAUDE_CODE_VERSION="2.0.76"
+          CLAUDE_CODE_VERSION="2.1.1"
           echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
           for attempt in 1 2 3; do
             echo "Installation attempt $attempt..."
             if command -v timeout &> /dev/null; then
-              timeout 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
+              # Use --foreground to kill entire process group on timeout, --kill-after to send SIGKILL if SIGTERM fails
+              timeout --foreground --kill-after=10 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
             else
               curl -fsSL https://claude.ai/install.sh | bash -s -- "$CLAUDE_CODE_VERSION" && break
             fi
@@ -334,6 +345,12 @@ runs:
           echo '```' >> $GITHUB_STEP_SUMMARY
         fi
 
+    - name: Cleanup SSH signing key
+      if: always() && inputs.ssh_signing_key != ''
+      shell: bash
+      run: |
+        bun run ${GITHUB_ACTION_PATH}/src/entrypoints/cleanup-ssh-signing.ts
+
     - name: Revoke app token
       if: always() && inputs.github_token == '' && steps.prepare.outputs.skipped_due_to_workflow_validation_mismatch != 'true'
       shell: bash
```
```diff
@@ -124,12 +124,13 @@ runs:
         PATH_TO_CLAUDE_CODE_EXECUTABLE: ${{ inputs.path_to_claude_code_executable }}
       run: |
         if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
-          CLAUDE_CODE_VERSION="2.0.76"
+          CLAUDE_CODE_VERSION="2.1.1"
           echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
           for attempt in 1 2 3; do
             echo "Installation attempt $attempt..."
             if command -v timeout &> /dev/null; then
-              timeout 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
+              # Use --foreground to kill entire process group on timeout, --kill-after to send SIGKILL if SIGTERM fails
+              timeout --foreground --kill-after=10 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
             else
               curl -fsSL https://claude.ai/install.sh | bash -s -- "$CLAUDE_CODE_VERSION" && break
             fi
```
```diff
@@ -6,7 +6,7 @@
       "name": "@anthropic-ai/claude-code-base-action",
       "dependencies": {
         "@actions/core": "^1.10.1",
-        "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+        "@anthropic-ai/claude-agent-sdk": "^0.2.1",
         "shell-quote": "^1.8.3",
       },
       "devDependencies": {
@@ -27,7 +27,7 @@
 
     "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
 
-    "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
+    "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.1", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-ZJO/TWcrFHGQTGHJDJl03mWozirWMBqdNpbuAgxZpLaHj2N5vyMxoeYiJC+7M0+gOSs7bjwKJLKTZcHGtGa34g=="],
 
     "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
 
```
```diff
@@ -11,7 +11,7 @@
   },
   "dependencies": {
     "@actions/core": "^1.10.1",
-    "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+    "@anthropic-ai/claude-agent-sdk": "^0.2.1",
     "shell-quote": "^1.8.3"
   },
   "devDependencies": {
```
```diff
@@ -8,26 +8,47 @@ const MARKETPLACE_URL_REGEX =
   /^https:\/\/[a-zA-Z0-9\-._~:/?#[\]@!$&'()*+,;=%]+\.git$/;
 
 /**
- * Validates a marketplace URL for security issues
- * @param url - The marketplace URL to validate
- * @throws {Error} If the URL is invalid
+ * Checks if a marketplace input is a local path (not a URL)
+ * @param input - The marketplace input to check
+ * @returns true if the input is a local path, false if it's a URL
  */
-function validateMarketplaceUrl(url: string): void {
-  const normalized = url.trim();
+function isLocalPath(input: string): boolean {
+  // Local paths start with ./, ../, /, or a drive letter (Windows)
+  return (
+    input.startsWith("./") ||
+    input.startsWith("../") ||
+    input.startsWith("/") ||
+    /^[a-zA-Z]:[\\\/]/.test(input)
+  );
+}
+
+/**
+ * Validates a marketplace URL or local path
+ * @param input - The marketplace URL or local path to validate
+ * @throws {Error} If the input is invalid
+ */
+function validateMarketplaceInput(input: string): void {
+  const normalized = input.trim();
 
   if (!normalized) {
-    throw new Error("Marketplace URL cannot be empty");
+    throw new Error("Marketplace URL or path cannot be empty");
   }
 
+  // Local paths are passed directly to Claude Code which handles them
+  if (isLocalPath(normalized)) {
+    return;
+  }
+
+  // Validate as URL
   if (!MARKETPLACE_URL_REGEX.test(normalized)) {
-    throw new Error(`Invalid marketplace URL format: ${url}`);
+    throw new Error(`Invalid marketplace URL format: ${input}`);
   }
 
   // Additional check for valid URL structure
   try {
     new URL(normalized);
   } catch {
-    throw new Error(`Invalid marketplace URL: ${url}`);
+    throw new Error(`Invalid marketplace URL: ${input}`);
   }
 }
 
@@ -55,9 +76,9 @@ function validatePluginName(pluginName: string): void {
 }
 
 /**
- * Parse a newline-separated list of marketplace URLs and return an array of validated URLs
- * @param marketplaces - Newline-separated list of marketplace Git URLs
- * @returns Array of validated marketplace URLs (empty array if none provided)
+ * Parse a newline-separated list of marketplace URLs or local paths and return an array of validated entries
+ * @param marketplaces - Newline-separated list of marketplace Git URLs or local paths
+ * @returns Array of validated marketplace URLs or paths (empty array if none provided)
  */
 function parseMarketplaces(marketplaces?: string): string[] {
   const trimmed = marketplaces?.trim();
@@ -66,14 +87,14 @@ function parseMarketplaces(marketplaces?: string): string[] {
     return [];
   }
 
-  // Split by newline and process each URL
+  // Split by newline and process each entry
   return trimmed
     .split("\n")
-    .map((url) => url.trim())
-    .filter((url) => {
-      if (url.length === 0) return false;
+    .map((entry) => entry.trim())
+    .filter((entry) => {
+      if (entry.length === 0) return false;
 
-      validateMarketplaceUrl(url);
+      validateMarketplaceInput(entry);
       return true;
     });
 }
@@ -163,26 +184,26 @@ async function installPlugin(
 /**
  * Adds a Claude Code plugin marketplace
  * @param claudeExecutable - Path to the Claude executable
- * @param marketplaceUrl - The marketplace Git URL to add
+ * @param marketplace - The marketplace Git URL or local path to add
  * @returns Promise that resolves when the marketplace add command completes
  * @throws {Error} If the command fails to execute
  */
 async function addMarketplace(
   claudeExecutable: string,
-  marketplaceUrl: string,
+  marketplace: string,
 ): Promise<void> {
-  console.log(`Adding marketplace: ${marketplaceUrl}`);
+  console.log(`Adding marketplace: ${marketplace}`);
 
   return executeClaudeCommand(
     claudeExecutable,
-    ["plugin", "marketplace", "add", marketplaceUrl],
-    `Failed to add marketplace '${marketplaceUrl}'`,
+    ["plugin", "marketplace", "add", marketplace],
+    `Failed to add marketplace '${marketplace}'`,
   );
 }
 
 /**
  * Installs Claude Code plugins from a newline-separated list
- * @param marketplacesInput - Newline-separated list of marketplace Git URLs
+ * @param marketplacesInput - Newline-separated list of marketplace Git URLs or local paths
 * @param pluginsInput - Newline-separated list of plugin names
 * @param claudeExecutable - Path to the Claude executable (defaults to "claude")
 * @returns Promise that resolves when all plugins are installed
```
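
For reference, here is how the new validation treats different marketplace entries. This is an illustrative sketch only: `validateMarketplaceInput` and `isLocalPath` are module-private in the file above, so the loop below assumes it runs inside that module.

```typescript
// Illustrative only: entries that the new isLocalPath / validateMarketplaceInput
// pair accepts. Local paths short-circuit the URL checks; everything else must
// match MARKETPLACE_URL_REGEX and parse with new URL().
const accepted = [
  "./my-local-marketplace",             // relative path -> passed through untouched
  "../shared-plugins/marketplace",      // parent-relative path -> passed through
  "/home/user/my-marketplace",          // absolute Unix path -> passed through
  "C:\\Users\\user\\marketplace",       // Windows drive path -> passed through
  "https://github.com/user/remote.git", // remote Git URL -> validated as before
];

for (const entry of accepted) {
  validateMarketplaceInput(entry); // does not throw for any of these
}

// Still rejected, as before:
// validateMarketplaceInput("")                        -> "Marketplace URL or path cannot be empty"
// validateMarketplaceInput("http://example.com/x.git") -> fails MARKETPLACE_URL_REGEX (https only)
```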
```diff
@@ -212,6 +212,8 @@ export function parseSdkOptions(options: ClaudeOptions): ParsedSdkOptions {
   if (process.env.INPUT_ACTION_INPUTS_PRESENT) {
     env.GITHUB_ACTION_INPUTS = process.env.INPUT_ACTION_INPUTS_PRESENT;
   }
+  // Ensure SDK path uses the same entrypoint as the CLI path
+  env.CLAUDE_CODE_ENTRYPOINT = "claude-code-github-action";
 
   // Build system prompt option - default to claude_code preset
   let systemPrompt: SdkOptions["systemPrompt"];
```
```diff
@@ -1,14 +1,81 @@
 import * as core from "@actions/core";
-import { readFile, writeFile } from "fs/promises";
+import { readFile, writeFile, access } from "fs/promises";
+import { dirname, join } from "path";
 import { query } from "@anthropic-ai/claude-agent-sdk";
 import type {
   SDKMessage,
   SDKResultMessage,
+  SDKUserMessage,
 } from "@anthropic-ai/claude-agent-sdk";
 import type { ParsedSdkOptions } from "./parse-sdk-options";
 
 const EXECUTION_FILE = `${process.env.RUNNER_TEMP}/claude-execution-output.json`;
 
+/** Filename for the user request file, written by prompt generation */
+const USER_REQUEST_FILENAME = "claude-user-request.txt";
+
+/**
+ * Check if a file exists
+ */
+async function fileExists(path: string): Promise<boolean> {
+  try {
+    await access(path);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+/**
+ * Creates a prompt configuration for the SDK.
+ * If a user request file exists alongside the prompt file, returns a multi-block
+ * SDKUserMessage that enables slash command processing in the CLI.
+ * Otherwise, returns the prompt as a simple string.
+ */
+async function createPromptConfig(
+  promptPath: string,
+  showFullOutput: boolean,
+): Promise<string | AsyncIterable<SDKUserMessage>> {
+  const promptContent = await readFile(promptPath, "utf-8");
+
+  // Check for user request file in the same directory
+  const userRequestPath = join(dirname(promptPath), USER_REQUEST_FILENAME);
+  const hasUserRequest = await fileExists(userRequestPath);
+
+  if (!hasUserRequest) {
+    // No user request file - use simple string prompt
+    return promptContent;
+  }
+
+  // User request file exists - create multi-block message
+  const userRequest = await readFile(userRequestPath, "utf-8");
+  if (showFullOutput) {
+    console.log("Using multi-block message with user request:", userRequest);
+  } else {
+    console.log("Using multi-block message with user request (content hidden)");
+  }
+
+  // Create an async generator that yields a single multi-block message
+  // The context/instructions go first, then the user's actual request last
+  // This allows the CLI to detect and process slash commands in the user request
+  async function* createMultiBlockMessage(): AsyncGenerator<SDKUserMessage> {
+    yield {
+      type: "user",
+      session_id: "",
+      message: {
+        role: "user",
+        content: [
+          { type: "text", text: promptContent }, // Instructions + GitHub context
+          { type: "text", text: userRequest }, // User's request (may be a slash command)
+        ],
+      },
+      parent_tool_use_id: null,
+    };
+  }
+
+  return createMultiBlockMessage();
+}
+
 /**
  * Sanitizes SDK output to match CLI sanitization behavior
  */
@@ -63,7 +130,8 @@ export async function runClaudeWithSdk(
   promptPath: string,
   { sdkOptions, showFullOutput, hasJsonSchema }: ParsedSdkOptions,
 ): Promise<void> {
-  const prompt = await readFile(promptPath, "utf-8");
+  // Create prompt configuration - may be a string or multi-block message
+  const prompt = await createPromptConfig(promptPath, showFullOutput);
 
   if (!showFullOutput) {
     console.log(
```
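
The two shapes that `createPromptConfig` can return differ only in how the user's request travels to the SDK. A small sketch of consuming either form (not part of the diff; the type guard and logging are illustrative):

```typescript
import type { SDKUserMessage } from "@anthropic-ai/claude-agent-sdk";

// Sketch: inspect whichever form createPromptConfig produced.
// A plain string means no claude-user-request.txt was found next to the prompt;
// an async iterable carries one user message with two text blocks
// (instructions + GitHub context first, the user's request last).
async function describePrompt(
  prompt: string | AsyncIterable<SDKUserMessage>,
): Promise<void> {
  if (typeof prompt === "string") {
    console.log(`Plain string prompt (${prompt.length} chars)`);
    return;
  }
  for await (const message of prompt) {
    const content = message.message.content;
    const blocks = Array.isArray(content) ? content.length : 1;
    console.log(`Multi-block user message with ${blocks} content block(s)`);
  }
}
```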
```diff
@@ -596,4 +596,111 @@ describe("installPlugins", () => {
       { stdio: "inherit" },
     );
   });
+
+  // Local marketplace path tests
+  test("should accept local marketplace path with ./", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("./my-local-marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "./my-local-marketplace"],
+      { stdio: "inherit" },
+    );
+    expect(spy).toHaveBeenNthCalledWith(
+      2,
+      "claude",
+      ["plugin", "install", "test-plugin"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept local marketplace path with absolute Unix path", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("/home/user/my-marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "/home/user/my-marketplace"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept local marketplace path with Windows absolute path", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("C:\\Users\\user\\marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "C:\\Users\\user\\marketplace"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept mixed local and remote marketplaces", async () => {
+    const spy = createMockSpawn();
+    await installPlugins(
+      "./local-marketplace\nhttps://github.com/user/remote.git",
+      "test-plugin",
+    );
+
+    expect(spy).toHaveBeenCalledTimes(3);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "./local-marketplace"],
+      { stdio: "inherit" },
+    );
+    expect(spy).toHaveBeenNthCalledWith(
+      2,
+      "claude",
+      ["plugin", "marketplace", "add", "https://github.com/user/remote.git"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept local path with ../ (parent directory)", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("../shared-plugins/marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "../shared-plugins/marketplace"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept local path with nested directories", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("./plugins/my-org/my-marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "./plugins/my-org/my-marketplace"],
+      { stdio: "inherit" },
+    );
+  });
+
+  test("should accept local path with dots in directory name", async () => {
+    const spy = createMockSpawn();
+    await installPlugins("./my.plugin.marketplace", "test-plugin");
+
+    expect(spy).toHaveBeenCalledTimes(2);
+    expect(spy).toHaveBeenNthCalledWith(
+      1,
+      "claude",
+      ["plugin", "marketplace", "add", "./my.plugin.marketplace"],
+      { stdio: "inherit" },
+    );
+  });
 });
```
```diff
@@ -2,6 +2,6 @@
   "name": "mcp-test",
   "version": "1.0.0",
   "dependencies": {
-    "@modelcontextprotocol/sdk": "^1.11.0"
+    "@modelcontextprotocol/sdk": "^1.24.0"
   }
 }
```
**bun.lock** (4 changed lines)
```diff
@@ -7,7 +7,7 @@
       "dependencies": {
         "@actions/core": "^1.10.1",
         "@actions/github": "^6.0.1",
-        "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+        "@anthropic-ai/claude-agent-sdk": "^0.2.1",
         "@modelcontextprotocol/sdk": "^1.11.0",
         "@octokit/graphql": "^8.2.2",
         "@octokit/rest": "^21.1.1",
@@ -37,7 +37,7 @@
 
     "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
 
-    "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
+    "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.1", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-ZJO/TWcrFHGQTGHJDJl03mWozirWMBqdNpbuAgxZpLaHj2N5vyMxoeYiJC+7M0+gOSs7bjwKJLKTZcHGtGa34g=="],
 
     "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
 
```
```diff
@@ -38,7 +38,64 @@ The following permissions are requested but not yet actively used. These will en
 
 ## Commit Signing
 
-Commits made by Claude through this action are no longer automatically signed with commit signatures. To enable commit signing set `use_commit_signing: True` in the workflow(s). This ensures the authenticity and integrity of commits, providing a verifiable trail of changes made by the action.
+By default, commits made by Claude are unsigned. You can enable commit signing using one of two methods:
+
+### Option 1: GitHub API Commit Signing (use_commit_signing)
+
+This uses GitHub's API to create commits, which automatically signs them as verified from the GitHub App:
+
+```yaml
+- uses: anthropics/claude-code-action@main
+  with:
+    use_commit_signing: true
+```
+
+This is the simplest option and requires no additional setup. However, because it uses the GitHub API instead of git CLI, it cannot perform complex git operations like rebasing, cherry-picking, or interactive history manipulation.
+
+### Option 2: SSH Signing Key (ssh_signing_key)
+
+This uses an SSH key to sign commits via git CLI. Use this option when you need both signed commits AND standard git operations (rebasing, cherry-picking, etc.):
+
+```yaml
+- uses: anthropics/claude-code-action@main
+  with:
+    ssh_signing_key: ${{ secrets.SSH_SIGNING_KEY }}
+    bot_id: "YOUR_GITHUB_USER_ID"
+    bot_name: "YOUR_GITHUB_USERNAME"
+```
+
+Commits will show as verified and attributed to the GitHub account that owns the signing key.
+
+**Setup steps:**
+
+1. Generate an SSH key pair for signing:
+
+   ```bash
+   ssh-keygen -t ed25519 -f ~/.ssh/signing_key -N "" -C "commit signing key"
+   ```
+
+2. Add the **public key** to your GitHub account:
+
+   - Go to GitHub → Settings → SSH and GPG keys
+   - Click "New SSH key"
+   - Select **Key type: Signing Key** (important)
+   - Paste the contents of `~/.ssh/signing_key.pub`
+
+3. Add the **private key** to your repository secrets:
+
+   - Go to your repo → Settings → Secrets and variables → Actions
+   - Create a new secret named `SSH_SIGNING_KEY`
+   - Paste the contents of `~/.ssh/signing_key`
+
+4. Get your GitHub user ID:
+
+   ```bash
+   gh api users/YOUR_USERNAME --jq '.id'
+   ```
+
+5. Update your workflow with `bot_id` and `bot_name` matching the account where you added the signing key.
+
+**Note:** If both `ssh_signing_key` and `use_commit_signing` are provided, `ssh_signing_key` takes precedence.
+
 ## ⚠️ Authentication Protection
 
```
```diff
@@ -71,9 +71,10 @@ jobs:
 | `branch_prefix` | The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format) | No | `claude/` |
 | `settings` | Claude Code settings as JSON string or path to settings JSON file | No | "" |
 | `additional_permissions` | Additional permissions to enable. Currently supports 'actions: read' for viewing workflow results | No | "" |
-| `use_commit_signing` | Enable commit signing using GitHub's commit signature verification. When false, Claude uses standard git commands | No | `false` |
-| `bot_id` | GitHub user ID to use for git operations (defaults to Claude's bot ID) | No | `41898282` |
-| `bot_name` | GitHub username to use for git operations (defaults to Claude's bot name) | No | `claude[bot]` |
+| `use_commit_signing` | Enable commit signing using GitHub's API. Simple but cannot perform complex git operations like rebasing. See [Security](./security.md#commit-signing) | No | `false` |
+| `ssh_signing_key` | SSH private key for signing commits. Enables signed commits with full git CLI support (rebasing, etc.). See [Security](./security.md#commit-signing) | No | "" |
+| `bot_id` | GitHub user ID to use for git operations (defaults to Claude's bot ID). Required with `ssh_signing_key` for verified commits | No | `41898282` |
+| `bot_name` | GitHub username to use for git operations (defaults to Claude's bot name). Required with `ssh_signing_key` for verified commits | No | `claude[bot]` |
 | `allowed_bots` | Comma-separated list of allowed bot usernames, or '\*' to allow all bots. Empty string (default) allows no bots | No | "" |
 | `allowed_non_write_users` | **⚠️ RISKY**: Comma-separated list of usernames to allow without write permissions, or '\*' for all users. Only works with `github_token` input. See [Security](./security.md) | No | "" |
 | `path_to_claude_code_executable` | Optional path to a custom Claude Code executable. Skips automatic installation. Useful for Nix, custom containers, or specialized environments | No | "" |
```
```diff
@@ -12,7 +12,7 @@
   "dependencies": {
     "@actions/core": "^1.10.1",
     "@actions/github": "^6.0.1",
-    "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+    "@anthropic-ai/claude-agent-sdk": "^0.2.1",
     "@modelcontextprotocol/sdk": "^1.11.0",
     "@octokit/graphql": "^8.2.2",
     "@octokit/rest": "^21.1.1",
```
```diff
@@ -21,8 +21,12 @@ import type { ParsedGitHubContext } from "../github/context";
 import type { CommonFields, PreparedContext, EventData } from "./types";
 import { GITHUB_SERVER_URL } from "../github/api/config";
 import type { Mode, ModeContext } from "../modes/types";
+import { extractUserRequest } from "../utils/extract-user-request";
 export type { CommonFields, PreparedContext } from "./types";
 
+/** Filename for the user request file, read by the SDK runner */
+const USER_REQUEST_FILENAME = "claude-user-request.txt";
+
 // Tag mode defaults - these tools are needed for tag mode to function
 const BASE_ALLOWED_TOOLS = [
   "Edit",
@@ -847,6 +851,55 @@ f. If you are unable to complete certain steps, such as running a linter or test
   return promptContent;
 }
 
+/**
+ * Extracts the user's request from the prepared context and GitHub data.
+ *
+ * This is used to send the user's actual command/request as a separate
+ * content block, enabling slash command processing in the CLI.
+ *
+ * @param context - The prepared context containing event data and trigger phrase
+ * @param githubData - The fetched GitHub data containing issue/PR body content
+ * @returns The extracted user request text (e.g., "/review-pr" or "fix this bug"),
+ *   or null for assigned/labeled events without an explicit trigger in the body
+ *
+ * @example
+ * // Comment event: "@claude /review-pr" -> returns "/review-pr"
+ * // Issue body with "@claude fix this" -> returns "fix this"
+ * // Issue assigned without @claude in body -> returns null
+ */
+function extractUserRequestFromContext(
+  context: PreparedContext,
+  githubData: FetchDataResult,
+): string | null {
+  const { eventData, triggerPhrase } = context;
+
+  // For comment events, extract from comment body
+  if (
+    "commentBody" in eventData &&
+    eventData.commentBody &&
+    (eventData.eventName === "issue_comment" ||
+      eventData.eventName === "pull_request_review_comment" ||
+      eventData.eventName === "pull_request_review")
+  ) {
+    return extractUserRequest(eventData.commentBody, triggerPhrase);
+  }
+
+  // For issue/PR events triggered by body content, extract from the body
+  if (githubData.contextData?.body) {
+    const request = extractUserRequest(
+      githubData.contextData.body,
+      triggerPhrase,
+    );
+    if (request) {
+      return request;
+    }
+  }
+
+  // For assigned/labeled events without explicit trigger in body,
+  // return null to indicate the full context should be used
+  return null;
+}
+
 export async function createPrompt(
   mode: Mode,
   modeContext: ModeContext,
@@ -895,6 +948,22 @@ export async function createPrompt(
     promptContent,
   );
 
+  // Extract and write the user request separately for SDK multi-block messaging
+  // This allows the CLI to process slash commands (e.g., "@claude /review-pr")
+  const userRequest = extractUserRequestFromContext(
+    preparedContext,
+    githubData,
+  );
+  if (userRequest) {
+    await writeFile(
+      `${process.env.RUNNER_TEMP || "/tmp"}/claude-prompts/${USER_REQUEST_FILENAME}`,
+      userRequest,
+    );
+    console.log("===== USER REQUEST =====");
+    console.log(userRequest);
+    console.log("========================");
+  }
+
   // Set allowed tools
   const hasActionsReadPermission = false;
 
```
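
`extractUserRequest` is imported from `../utils/extract-user-request`, but its implementation is not part of this diff. Based on the JSDoc examples above ("@claude /review-pr" returning "/review-pr"), a hypothetical version might look roughly like the following; this is a sketch of the assumed behavior, not the repository's actual helper.

```typescript
// Hypothetical sketch of extractUserRequest; the real helper in
// src/utils/extract-user-request may differ (e.g. in how it handles
// multiple mentions or trailing context).
export function extractUserRequest(
  body: string,
  triggerPhrase: string, // e.g. "@claude"
): string | null {
  const index = body.indexOf(triggerPhrase);
  if (index === -1) {
    return null; // no explicit trigger in the text
  }
  const request = body.slice(index + triggerPhrase.length).trim();
  return request.length > 0 ? request : null;
}
```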
**src/entrypoints/cleanup-ssh-signing.ts** (new file, 21 lines)
```diff
@@ -0,0 +1,21 @@
+#!/usr/bin/env bun
+
+/**
+ * Cleanup SSH signing key after action completes
+ * This is run as a post step for security purposes
+ */
+
+import { cleanupSshSigning } from "../github/operations/git-config";
+
+async function run() {
+  try {
+    await cleanupSshSigning();
+  } catch (error) {
+    // Don't fail the action if cleanup fails, just log it
+    console.error("Failed to cleanup SSH signing key:", error);
+  }
+}
+
+if (import.meta.main) {
+  run();
+}
```
```diff
@@ -26,6 +26,7 @@ export function collectActionInputsPresence(): void {
     max_turns: "",
     use_sticky_comment: "false",
     use_commit_signing: "false",
+    ssh_signing_key: "",
   };
 
   const allInputsJson = process.env.ALL_INPUTS;
```
```diff
@@ -18,6 +18,11 @@ export const PR_QUERY = `
       additions
       deletions
       state
+      labels(first: 1) {
+        nodes {
+          name
+        }
+      }
       commits(first: 100) {
         totalCount
         nodes {
@@ -101,6 +106,11 @@ export const ISSUE_QUERY = `
       updatedAt
       lastEditedAt
       state
+      labels(first: 1) {
+        nodes {
+          name
+        }
+      }
       comments(first: 100) {
         nodes {
           id
```
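
Both queries now fetch at most one label. A small sketch of the result shape this adds and how a single label name is read from it (this mirrors the `extractFirstLabel` helper introduced in the branch-setup change further down, and the `labels` fields added to the GraphQL types):

```typescript
// Sketch: the shape added by labels(first: 1) { nodes { name } } and how the
// first (and only) fetched label is read from it.
type WithLabels = { labels: { nodes: Array<{ name: string }> } };

function firstLabelName(entity: WithLabels): string | undefined {
  return entity.labels.nodes[0]?.name;
}
```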
```diff
@@ -88,8 +88,10 @@ type BaseContext = {
   labelTrigger: string;
   baseBranch?: string;
   branchPrefix: string;
+  branchNameTemplate?: string;
   useStickyComment: boolean;
   useCommitSigning: boolean;
+  sshSigningKey: string;
   botId: string;
   botName: string;
   allowedBots: string;
@@ -144,8 +146,10 @@ export function parseGitHubContext(): GitHubContext {
       labelTrigger: process.env.LABEL_TRIGGER ?? "",
       baseBranch: process.env.BASE_BRANCH,
       branchPrefix: process.env.BRANCH_PREFIX ?? "claude/",
+      branchNameTemplate: process.env.BRANCH_NAME_TEMPLATE,
       useStickyComment: process.env.USE_STICKY_COMMENT === "true",
       useCommitSigning: process.env.USE_COMMIT_SIGNING === "true",
+      sshSigningKey: process.env.SSH_SIGNING_KEY || "",
       botId: process.env.BOT_ID ?? String(CLAUDE_APP_BOT_ID),
       botName: process.env.BOT_NAME ?? CLAUDE_BOT_LOGIN,
       allowedBots: process.env.ALLOWED_BOTS ?? "",
```
```diff
@@ -3,6 +3,8 @@ import type { Octokits } from "../api/client";
 import { ISSUE_QUERY, PR_QUERY, USER_QUERY } from "../api/queries/github";
 import {
   isIssueCommentEvent,
+  isIssuesEvent,
+  isPullRequestEvent,
   isPullRequestReviewEvent,
   isPullRequestReviewCommentEvent,
   type ParsedGitHubContext,
@@ -40,6 +42,31 @@ export function extractTriggerTimestamp(
   return undefined;
 }
 
+/**
+ * Extracts the original title from the GitHub webhook payload.
+ * This is the title as it existed when the trigger event occurred.
+ *
+ * @param context - Parsed GitHub context from webhook
+ * @returns The original title string or undefined if not available
+ */
+export function extractOriginalTitle(
+  context: ParsedGitHubContext,
+): string | undefined {
+  if (isIssueCommentEvent(context)) {
+    return context.payload.issue?.title;
+  } else if (isPullRequestEvent(context)) {
+    return context.payload.pull_request?.title;
+  } else if (isPullRequestReviewEvent(context)) {
+    return context.payload.pull_request?.title;
+  } else if (isPullRequestReviewCommentEvent(context)) {
+    return context.payload.pull_request?.title;
+  } else if (isIssuesEvent(context)) {
+    return context.payload.issue?.title;
+  }
+
+  return undefined;
+}
+
 /**
  * Filters comments to only include those that existed in their final state before the trigger time.
  * This prevents malicious actors from editing comments after the trigger to inject harmful content.
@@ -146,6 +173,7 @@ type FetchDataParams = {
   isPR: boolean;
   triggerUsername?: string;
   triggerTime?: string;
+  originalTitle?: string;
 };
 
 export type GitHubFileWithSHA = GitHubFile & {
@@ -169,6 +197,7 @@ export async function fetchGitHubData({
   isPR,
   triggerUsername,
   triggerTime,
+  originalTitle,
 }: FetchDataParams): Promise<FetchDataResult> {
   const [owner, repo] = repository.split("/");
   if (!owner || !repo) {
@@ -354,6 +383,11 @@ export async function fetchGitHubData({
     triggerDisplayName = await fetchUserDisplayName(octokits, triggerUsername);
   }
 
+  // Use the original title from the webhook payload if provided
+  if (originalTitle !== undefined) {
+    contextData.title = originalTitle;
+  }
+
   return {
     contextData,
     comments,
```
```diff
@@ -14,7 +14,8 @@ export function formatContext(
 ): string {
   if (isPR) {
     const prData = contextData as GitHubPullRequest;
-    return `PR Title: ${prData.title}
+    const sanitizedTitle = sanitizeContent(prData.title);
+    return `PR Title: ${sanitizedTitle}
 PR Author: ${prData.author.login}
 PR Branch: ${prData.headRefName} -> ${prData.baseRefName}
 PR State: ${prData.state}
@@ -24,7 +25,8 @@ Total Commits: ${prData.commits.totalCount}
 Changed Files: ${prData.files.nodes.length} files`;
   } else {
     const issueData = contextData as GitHubIssue;
-    return `Issue Title: ${issueData.title}
+    const sanitizedTitle = sanitizeContent(issueData.title);
+    return `Issue Title: ${sanitizedTitle}
 Issue Author: ${issueData.author.login}
 Issue State: ${issueData.state}`;
   }
```
```diff
@@ -6,12 +6,22 @@
  * - For Issues: Create a new branch
  */
 
+import { $ } from "bun";
 import { execFileSync } from "child_process";
 import * as core from "@actions/core";
 import type { ParsedGitHubContext } from "../context";
 import type { GitHubPullRequest } from "../types";
 import type { Octokits } from "../api/client";
 import type { FetchDataResult } from "../data/fetcher";
+import { generateBranchName } from "../../utils/branch-template";
+
+/**
+ * Extracts the first label from GitHub data, or returns undefined if no labels exist
+ */
+function extractFirstLabel(githubData: FetchDataResult): string | undefined {
+  const labels = githubData.contextData.labels?.nodes;
+  return labels && labels.length > 0 ? labels[0]?.name : undefined;
+}
 
 /**
  * Validates a git branch name against a strict whitelist pattern.
@@ -125,7 +135,7 @@ export async function setupBranch(
 ): Promise<BranchInfo> {
   const { owner, repo } = context.repository;
   const entityNumber = context.entityNumber;
-  const { baseBranch, branchPrefix } = context.inputs;
+  const { baseBranch, branchPrefix, branchNameTemplate } = context.inputs;
   const isPR = context.isPR;
 
   if (isPR) {
@@ -191,17 +201,8 @@ export async function setupBranch(
   // Generate branch name for either an issue or closed/merged PR
   const entityType = isPR ? "pr" : "issue";
 
-  // Create Kubernetes-compatible timestamp: lowercase, hyphens only, shorter format
-  const now = new Date();
-  const timestamp = `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`;
-
-  // Ensure branch name is Kubernetes-compatible:
-  // - Lowercase only
-  // - Alphanumeric with hyphens
-  // - No underscores
-  // - Max 50 chars (to allow for prefixes)
-  const branchName = `${branchPrefix}${entityType}-${entityNumber}-${timestamp}`;
-  const newBranch = branchName.toLowerCase().substring(0, 50);
+  // Get the SHA of the source branch to use in template
+  let sourceSHA: string | undefined;
 
   try {
     // Get the SHA of the source branch to verify it exists
@@ -211,8 +212,46 @@ export async function setupBranch(
       ref: `heads/${sourceBranch}`,
     });
 
-    const currentSHA = sourceBranchRef.data.object.sha;
-    console.log(`Source branch SHA: ${currentSHA}`);
+    sourceSHA = sourceBranchRef.data.object.sha;
+    console.log(`Source branch SHA: ${sourceSHA}`);
+
+    // Extract first label from GitHub data
+    const firstLabel = extractFirstLabel(githubData);
+
+    // Extract title from GitHub data
+    const title = githubData.contextData.title;
+
+    // Generate branch name using template or default format
+    let newBranch = generateBranchName(
+      branchNameTemplate,
+      branchPrefix,
+      entityType,
+      entityNumber,
+      sourceSHA,
+      firstLabel,
+      title,
+    );
+
+    // Check if generated branch already exists on remote
+    try {
+      await $`git ls-remote --exit-code origin refs/heads/${newBranch}`.quiet();
+
+      // If we get here, branch exists (exit code 0)
+      console.log(
+        `Branch '${newBranch}' already exists, falling back to default format`,
+      );
+      newBranch = generateBranchName(
+        undefined, // Force default template
+        branchPrefix,
+        entityType,
+        entityNumber,
+        sourceSHA,
+        firstLabel,
+        title,
+      );
+    } catch {
+      // Branch doesn't exist (non-zero exit code), continue with generated name
+    }
+
     // For commit signing, defer branch creation to the file ops server
     if (context.inputs.useCommitSigning) {
```
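
`generateBranchName` is imported from `../../utils/branch-template`, which is not included in this diff. A rough sketch of a renderer for the template variables documented in `action.yml` (`{{prefix}}`, `{{entityType}}`, `{{entityNumber}}`, `{{timestamp}}`, `{{sha}}`, `{{label}}`, `{{description}}`) could look like the following; this is an assumption about the helper's general shape, not its actual implementation.

```typescript
// Hypothetical sketch only: the real generateBranchName in src/utils/branch-template
// takes individual arguments (template, prefix, entityType, entityNumber, sourceSHA,
// firstLabel, title) per the call site above; this just shows how {{variable}}
// substitution might work.
function renderBranchTemplate(
  template: string, // e.g. "{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}"
  values: Record<string, string | undefined>,
): string {
  const rendered = template.replace(
    /\{\{(\w+)\}\}/g,
    (_match, name: string) => values[name] ?? "",
  );
  // Keep the sketch's result branch-name friendly: lowercase, no underscores, bounded length.
  return rendered.toLowerCase().replace(/_/g, "-").substring(0, 50);
}

// Example with the default template:
// renderBranchTemplate("{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}",
//   { prefix: "claude/", entityType: "issue", entityNumber: "123", timestamp: "20260123-1407" })
// -> "claude/issue-123-20260123-1407"
```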
```diff
@@ -6,9 +6,14 @@
  */
 
 import { $ } from "bun";
+import { mkdir, writeFile, rm } from "fs/promises";
+import { join } from "path";
+import { homedir } from "os";
 import type { GitHubContext } from "../context";
 import { GITHUB_SERVER_URL } from "../api/config";
 
+const SSH_SIGNING_KEY_PATH = join(homedir(), ".ssh", "claude_signing_key");
+
 type GitUser = {
   login: string;
   id: number;
@@ -54,3 +59,50 @@ export async function configureGitAuth(
 
   console.log("Git authentication configured successfully");
 }
+
+/**
+ * Configure git to use SSH signing for commits
+ * This is an alternative to GitHub API-based commit signing (use_commit_signing)
+ */
+export async function setupSshSigning(sshSigningKey: string): Promise<void> {
+  console.log("Configuring SSH signing for commits...");
+
+  // Validate SSH key format
+  if (!sshSigningKey.trim()) {
+    throw new Error("SSH signing key cannot be empty");
+  }
+  if (
+    !sshSigningKey.includes("BEGIN") ||
+    !sshSigningKey.includes("PRIVATE KEY")
+  ) {
+    throw new Error("Invalid SSH private key format");
+  }
+
+  // Create .ssh directory with secure permissions (700)
+  const sshDir = join(homedir(), ".ssh");
+  await mkdir(sshDir, { recursive: true, mode: 0o700 });
+
+  // Write the signing key atomically with secure permissions (600)
+  await writeFile(SSH_SIGNING_KEY_PATH, sshSigningKey, { mode: 0o600 });
+  console.log(`✓ SSH signing key written to ${SSH_SIGNING_KEY_PATH}`);
+
+  // Configure git to use SSH signing
+  await $`git config gpg.format ssh`;
+  await $`git config user.signingkey ${SSH_SIGNING_KEY_PATH}`;
+  await $`git config commit.gpgsign true`;
+
+  console.log("✓ Git configured to use SSH signing for commits");
+}
+
+/**
+ * Clean up the SSH signing key file
+ * Should be called in the post step for security
+ */
+export async function cleanupSshSigning(): Promise<void> {
+  try {
+    await rm(SSH_SIGNING_KEY_PATH, { force: true });
+    console.log("✓ SSH signing key cleaned up");
+  } catch (error) {
+    console.log("No SSH signing key to clean up");
+  }
+}
```
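
The two new exports sit at opposite ends of a run: `setupSshSigning` before Claude makes commits, and `cleanupSshSigning` from the always-run cleanup step (`src/entrypoints/cleanup-ssh-signing.ts`). A compact sketch of that lifecycle, with the caveat that the try/finally wrapper is illustrative; in the action itself, setup and cleanup happen in separate workflow steps rather than one function:

```typescript
// Adjust the relative path as needed; the real module lives at
// src/github/operations/git-config.
import { setupSshSigning, cleanupSshSigning } from "./git-config";

// Illustrative lifecycle: install the key, do signed git work, always remove the key.
async function withSshSigning(
  sshSigningKey: string,
  work: () => Promise<void>,
): Promise<void> {
  // Writes ~/.ssh/claude_signing_key with mode 600 and sets
  // gpg.format=ssh, user.signingkey, and commit.gpgsign=true.
  await setupSshSigning(sshSigningKey);
  try {
    await work(); // commits made here are SSH-signed
  } finally {
    await cleanupSshSigning(); // removes the key file even if work() fails
  }
}
```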
```diff
@@ -63,6 +63,11 @@ export type GitHubPullRequest = {
   additions: number;
   deletions: number;
   state: string;
+  labels: {
+    nodes: Array<{
+      name: string;
+    }>;
+  };
   commits: {
     totalCount: number;
     nodes: Array<{
@@ -88,6 +93,11 @@ export type GitHubIssue = {
   updatedAt?: string;
   lastEditedAt?: string;
   state: string;
+  labels: {
+    nodes: Array<{
+      name: string;
+    }>;
+  };
   comments: {
     nodes: GitHubComment[];
   };
```
@@ -4,11 +4,12 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 import { z } from "zod";
 import { readFile, stat } from "fs/promises";
-import { join } from "path";
+import { resolve } from "path";
 import { constants } from "fs";
 import fetch from "node-fetch";
 import { GITHUB_API_URL } from "../github/api/config";
 import { retryWithBackoff } from "../utils/retry";
+import { validatePathWithinRepo } from "./path-validation";

 type GitHubRef = {
   object: {
@@ -213,12 +214,18 @@ server.tool(
       throw new Error("GITHUB_TOKEN environment variable is required");
     }

-    const processedFiles = files.map((filePath) => {
-      if (filePath.startsWith("/")) {
-        return filePath.slice(1);
-      }
-      return filePath;
-    });
+    // Validate all paths are within repository root and get full/relative paths
+    const resolvedRepoDir = resolve(REPO_DIR);
+    const validatedFiles = await Promise.all(
+      files.map(async (filePath) => {
+        const fullPath = await validatePathWithinRepo(filePath, REPO_DIR);
+        // Calculate the relative path for the git tree entry
+        // Use the original filePath (normalized) for the git path, not the symlink-resolved path
+        const normalizedPath = resolve(resolvedRepoDir, filePath);
+        const relativePath = normalizedPath.slice(resolvedRepoDir.length + 1);
+        return { fullPath, relativePath };
+      }),
+    );

     // 1. Get the branch reference (create if doesn't exist)
     const baseSha = await getOrCreateBranchRef(
@@ -247,18 +254,14 @@ server.tool(

     // 3. Create tree entries for all files
     const treeEntries = await Promise.all(
-      processedFiles.map(async (filePath) => {
-        const fullPath = filePath.startsWith("/")
-          ? filePath
-          : join(REPO_DIR, filePath);
-
+      validatedFiles.map(async ({ fullPath, relativePath }) => {
         // Get the proper file mode based on file permissions
         const fileMode = await getFileMode(fullPath);

         // Check if file is binary (images, etc.)
         const isBinaryFile =
           /\.(png|jpg|jpeg|gif|webp|ico|pdf|zip|tar|gz|exe|bin|woff|woff2|ttf|eot)$/i.test(
-            filePath,
+            relativePath,
           );

         if (isBinaryFile) {
@@ -284,7 +287,7 @@ server.tool(
         if (!blobResponse.ok) {
           const errorText = await blobResponse.text();
           throw new Error(
-            `Failed to create blob for ${filePath}: ${blobResponse.status} - ${errorText}`,
+            `Failed to create blob for ${relativePath}: ${blobResponse.status} - ${errorText}`,
           );
         }

@@ -292,7 +295,7 @@ server.tool(

         // Return tree entry with blob SHA
         return {
-          path: filePath,
+          path: relativePath,
           mode: fileMode,
           type: "blob",
           sha: blobData.sha,
@@ -301,7 +304,7 @@ server.tool(
         // For text files, include content directly in tree
         const content = await readFile(fullPath, "utf-8");
         return {
-          path: filePath,
+          path: relativePath,
           mode: fileMode,
           type: "blob",
           content: content,
@@ -421,7 +424,9 @@ server.tool(
           author: newCommitData.author.name,
           date: newCommitData.author.date,
         },
-        files: processedFiles.map((path) => ({ path })),
+        files: validatedFiles.map(({ relativePath }) => ({
+          path: relativePath,
+        })),
         tree: {
           sha: treeData.sha,
         },
64  src/mcp/path-validation.ts  Normal file
@@ -0,0 +1,64 @@
+import { realpath } from "fs/promises";
+import { resolve, sep } from "path";
+
+/**
+ * Validates that a file path resolves within the repository root.
+ * Prevents path traversal attacks via "../" sequences and symlinks.
+ * @param filePath - The file path to validate (can be relative or absolute)
+ * @param repoRoot - The repository root directory
+ * @returns The resolved absolute path (with symlinks resolved) if valid
+ * @throws Error if the path resolves outside the repository root
+ */
+export async function validatePathWithinRepo(
+  filePath: string,
+  repoRoot: string,
+): Promise<string> {
+  // First resolve the path string (handles .. and . segments)
+  const initialPath = resolve(repoRoot, filePath);
+
+  // Resolve symlinks to get the real path
+  // This prevents symlink attacks where a link inside the repo points outside
+  let resolvedRoot: string;
+  let resolvedPath: string;
+
+  try {
+    resolvedRoot = await realpath(repoRoot);
+  } catch {
+    throw new Error(`Repository root '${repoRoot}' does not exist`);
+  }
+
+  try {
+    resolvedPath = await realpath(initialPath);
+  } catch {
+    // File doesn't exist yet - fall back to checking the parent directory
+    // This handles the case where we're creating a new file
+    const parentDir = resolve(initialPath, "..");
+    try {
+      const resolvedParent = await realpath(parentDir);
+      if (
+        resolvedParent !== resolvedRoot &&
+        !resolvedParent.startsWith(resolvedRoot + sep)
+      ) {
+        throw new Error(
+          `Path '${filePath}' resolves outside the repository root`,
+        );
+      }
+      // Parent is valid, return the initial path since file doesn't exist yet
+      return initialPath;
+    } catch {
+      throw new Error(
+        `Path '${filePath}' resolves outside the repository root`,
+      );
+    }
+  }
+
+  // Path must be within repo root (or be the root itself)
+  if (
+    resolvedPath !== resolvedRoot &&
+    !resolvedPath.startsWith(resolvedRoot + sep)
+  ) {
+    throw new Error(`Path '${filePath}' resolves outside the repository root`);
+  }
+
+  return resolvedPath;
+}
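Note (not part of the diff): a minimal usage sketch of the helper above, assuming the caller passes its own checkout root; the repoDir variable shown is illustrative only.

  import { validatePathWithinRepo } from "./path-validation";

  // Throws if the user-supplied path escapes the checkout via "../" or a symlink;
  // otherwise returns the resolved absolute path inside the repository.
  const repoDir = process.env.REPO_DIR ?? process.cwd();
  const fullPath = await validatePathWithinRepo("src/main.ts", repoDir);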
@@ -4,7 +4,10 @@ import type { Mode, ModeOptions, ModeResult } from "../types";
 import type { PreparedContext } from "../../create-prompt/types";
 import { prepareMcpConfig } from "../../mcp/install-mcp-server";
 import { parseAllowedTools } from "./parse-tools";
-import { configureGitAuth } from "../../github/operations/git-config";
+import {
+  configureGitAuth,
+  setupSshSigning,
+} from "../../github/operations/git-config";
 import type { GitHubContext } from "../../github/context";
 import { isEntityContext } from "../../github/context";

@@ -79,7 +82,27 @@ export const agentMode: Mode = {

   async prepare({ context, githubToken }: ModeOptions): Promise<ModeResult> {
     // Configure git authentication for agent mode (same as tag mode)
-    if (!context.inputs.useCommitSigning) {
+    // SSH signing takes precedence if provided
+    const useSshSigning = !!context.inputs.sshSigningKey;
+    const useApiCommitSigning =
+      context.inputs.useCommitSigning && !useSshSigning;
+
+    if (useSshSigning) {
+      // Setup SSH signing for commits
+      await setupSshSigning(context.inputs.sshSigningKey);
+
+      // Still configure git auth for push operations (user/email and remote URL)
+      const user = {
+        login: context.inputs.botName,
+        id: parseInt(context.inputs.botId),
+      };
+      try {
+        await configureGitAuth(githubToken, context, user);
+      } catch (error) {
+        console.error("Failed to configure git authentication:", error);
+        // Continue anyway - git operations may still work with default config
+      }
+    } else if (!useApiCommitSigning) {
       // Use bot_id and bot_name from inputs directly
       const user = {
         login: context.inputs.botName,
@@ -4,11 +4,15 @@ import { checkContainsTrigger } from "../../github/validation/trigger";
 import { checkHumanActor } from "../../github/validation/actor";
 import { createInitialComment } from "../../github/operations/comments/create-initial";
 import { setupBranch } from "../../github/operations/branch";
-import { configureGitAuth } from "../../github/operations/git-config";
+import {
+  configureGitAuth,
+  setupSshSigning,
+} from "../../github/operations/git-config";
 import { prepareMcpConfig } from "../../mcp/install-mcp-server";
 import {
   fetchGitHubData,
   extractTriggerTimestamp,
+  extractOriginalTitle,
 } from "../../github/data/fetcher";
 import { createPrompt, generateDefaultPrompt } from "../../create-prompt";
 import { isEntityContext } from "../../github/context";
@@ -75,6 +79,7 @@ export const tagMode: Mode = {
     const commentId = commentData.id;

     const triggerTime = extractTriggerTimestamp(context);
+    const originalTitle = extractOriginalTitle(context);

     const githubData = await fetchGitHubData({
       octokits: octokit,
@@ -83,13 +88,34 @@ export const tagMode: Mode = {
       isPR: context.isPR,
       triggerUsername: context.actor,
       triggerTime,
+      originalTitle,
     });

     // Setup branch
     const branchInfo = await setupBranch(octokit, githubData, context);

-    // Configure git authentication if not using commit signing
-    if (!context.inputs.useCommitSigning) {
+    // Configure git authentication
+    // SSH signing takes precedence if provided
+    const useSshSigning = !!context.inputs.sshSigningKey;
+    const useApiCommitSigning =
+      context.inputs.useCommitSigning && !useSshSigning;
+
+    if (useSshSigning) {
+      // Setup SSH signing for commits
+      await setupSshSigning(context.inputs.sshSigningKey);
+
+      // Still configure git auth for push operations (user/email and remote URL)
+      const user = {
+        login: context.inputs.botName,
+        id: parseInt(context.inputs.botId),
+      };
+      try {
+        await configureGitAuth(githubToken, context, user);
+      } catch (error) {
+        console.error("Failed to configure git authentication:", error);
+        throw error;
+      }
+    } else if (!useApiCommitSigning) {
       // Use bot_id and bot_name from inputs directly
       const user = {
         login: context.inputs.botName,
@@ -135,8 +161,9 @@ export const tagMode: Mode = {
       ...userAllowedMCPTools,
     ];

-    // Add git commands when not using commit signing
-    if (!context.inputs.useCommitSigning) {
+    // Add git commands when using git CLI (no API commit signing, or SSH signing)
+    // SSH signing still uses git CLI, just with signing enabled
+    if (!useApiCommitSigning) {
       tagModeTools.push(
         "Bash(git add:*)",
         "Bash(git commit:*)",
@@ -147,7 +174,7 @@ export const tagMode: Mode = {
         "Bash(git rm:*)",
       );
     } else {
-      // When using commit signing, use MCP file ops tools
+      // When using API commit signing, use MCP file ops tools
       tagModeTools.push(
         "mcp__github_file_ops__commit_files",
         "mcp__github_file_ops__delete_files",
99  src/utils/branch-template.ts  Normal file
@@ -0,0 +1,99 @@
+#!/usr/bin/env bun
+
+/**
+ * Branch name template parsing and variable substitution utilities
+ */
+
+const NUM_DESCRIPTION_WORDS = 5;
+
+/**
+ * Extracts the first 5 words from a title and converts them to kebab-case
+ */
+function extractDescription(
+  title: string,
+  numWords: number = NUM_DESCRIPTION_WORDS,
+): string {
+  if (!title || title.trim() === "") {
+    return "";
+  }
+
+  return title
+    .trim()
+    .split(/\s+/)
+    .slice(0, numWords) // Only first `numWords` words
+    .join("-")
+    .toLowerCase()
+    .replace(/[^a-z0-9-]/g, "") // Remove non-alphanumeric except hyphens
+    .replace(/-+/g, "-") // Replace multiple hyphens with single
+    .replace(/^-|-$/g, ""); // Remove leading/trailing hyphens
+}
+
+export interface BranchTemplateVariables {
+  prefix: string;
+  entityType: string;
+  entityNumber: number;
+  timestamp: string;
+  sha?: string;
+  label?: string;
+  description?: string;
+}
+
+/**
+ * Replaces template variables in a branch name template
+ * Template format: {{variableName}}
+ */
+export function applyBranchTemplate(
+  template: string,
+  variables: BranchTemplateVariables,
+): string {
+  let result = template;
+
+  // Replace each variable
+  Object.entries(variables).forEach(([key, value]) => {
+    const placeholder = `{{${key}}}`;
+    const replacement = value ? String(value) : "";
+    result = result.replaceAll(placeholder, replacement);
+  });
+
+  return result;
+}
+
+/**
+ * Generates a branch name from the provided `template` and set of `variables`. Uses a default format if the template is empty or produces an empty result.
+ */
+export function generateBranchName(
+  template: string | undefined,
+  branchPrefix: string,
+  entityType: string,
+  entityNumber: number,
+  sha?: string,
+  label?: string,
+  title?: string,
+): string {
+  const now = new Date();
+
+  const variables: BranchTemplateVariables = {
+    prefix: branchPrefix,
+    entityType,
+    entityNumber,
+    timestamp: `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`,
+    sha: sha?.substring(0, 8), // First 8 characters of SHA
+    label: label || entityType, // Fall back to entityType if no label
+    description: title ? extractDescription(title) : undefined,
+  };
+
+  if (template?.trim()) {
+    const branchName = applyBranchTemplate(template, variables);
+
+    // Some templates could produce empty results- validate
+    if (branchName.trim().length > 0) return branchName;
+
+    console.log(
+      `Branch template '${template}' generated empty result, falling back to default format`,
+    );
+  }
+
+  const branchName = `${branchPrefix}${entityType}-${entityNumber}-${variables.timestamp}`;
+  // Kubernetes compatible: lowercase, max 50 chars, alphanumeric and hyphens only
+  return branchName.toLowerCase().substring(0, 50);
+}
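Note (not part of the diff): a minimal sketch of how generateBranchName behaves with a custom template, based only on the code above; the argument values are illustrative.

  import { generateBranchName } from "./branch-template";

  // "{{label}}" falls back to the entity type and "{{description}}" is the
  // kebab-cased first five words of the title; custom templates are not
  // lowercased or truncated.
  const name = generateBranchName(
    "{{prefix}}{{label}}/{{description}}-{{entityNumber}}",
    "claude/",
    "issue",
    42,
    undefined,
    "bug",
    "Fix login bug with OAuth",
  );
  // -> "claude/bug/fix-login-bug-with-oauth-42"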
32  src/utils/extract-user-request.ts  Normal file
@@ -0,0 +1,32 @@
+/**
+ * Extracts the user's request from a trigger comment.
+ *
+ * Given a comment like "@claude /review-pr please check the auth module",
+ * this extracts "/review-pr please check the auth module".
+ *
+ * @param commentBody - The full comment body containing the trigger phrase
+ * @param triggerPhrase - The trigger phrase (e.g., "@claude")
+ * @returns The user's request (text after the trigger phrase), or null if not found
+ */
+export function extractUserRequest(
+  commentBody: string | undefined,
+  triggerPhrase: string,
+): string | null {
+  if (!commentBody) {
+    return null;
+  }
+
+  // Use string operations instead of regex for better performance and security
+  // (avoids potential ReDoS with large comment bodies)
+  const triggerIndex = commentBody
+    .toLowerCase()
+    .indexOf(triggerPhrase.toLowerCase());
+  if (triggerIndex === -1) {
+    return null;
+  }
+
+  const afterTrigger = commentBody
+    .substring(triggerIndex + triggerPhrase.length)
+    .trim();
+  return afterTrigger || null;
+}
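Note (not part of the diff): a minimal usage sketch of extractUserRequest, mirroring the doc comment above; the comment text is illustrative.

  import { extractUserRequest } from "./extract-user-request";

  // Case-insensitive match on the trigger phrase; returns null when it is absent
  // or nothing follows it.
  const request = extractUserRequest(
    "@claude /review-pr please check the auth module",
    "@claude",
  );
  // -> "/review-pr please check the auth module"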
247  test/branch-template.test.ts  Normal file
@@ -0,0 +1,247 @@
+#!/usr/bin/env bun
+
+import { describe, it, expect } from "bun:test";
+import {
+  applyBranchTemplate,
+  generateBranchName,
+} from "../src/utils/branch-template";
+
+describe("branch template utilities", () => {
+  describe("applyBranchTemplate", () => {
+    it("should replace all template variables", () => {
+      const template =
+        "{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}";
+      const variables = {
+        prefix: "feat/",
+        entityType: "issue",
+        entityNumber: 123,
+        timestamp: "20240301-1430",
+        sha: "abcd1234",
+      };
+
+      const result = applyBranchTemplate(template, variables);
+      expect(result).toBe("feat/issue-123-20240301-1430");
+    });
+
+    it("should handle custom templates with multiple variables", () => {
+      const template =
+        "{{prefix}}fix/{{entityType}}_{{entityNumber}}_{{timestamp}}_{{sha}}";
+      const variables = {
+        prefix: "claude-",
+        entityType: "pr",
+        entityNumber: 456,
+        timestamp: "20240301-1430",
+        sha: "abcd1234",
+      };
+
+      const result = applyBranchTemplate(template, variables);
+      expect(result).toBe("claude-fix/pr_456_20240301-1430_abcd1234");
+    });
+
+    it("should handle templates with missing variables gracefully", () => {
+      const template = "{{prefix}}{{entityType}}-{{missing}}-{{entityNumber}}";
+      const variables = {
+        prefix: "feat/",
+        entityType: "issue",
+        entityNumber: 123,
+        timestamp: "20240301-1430",
+      };
+
+      const result = applyBranchTemplate(template, variables);
+      expect(result).toBe("feat/issue-{{missing}}-123");
+    });
+  });
+
+  describe("generateBranchName", () => {
+    it("should use custom template when provided", () => {
+      const template = "{{prefix}}custom-{{entityType}}_{{entityNumber}}";
+      const result = generateBranchName(template, "feature/", "issue", 123);
+
+      expect(result).toBe("feature/custom-issue_123");
+    });
+
+    it("should use default format when template is empty", () => {
+      const result = generateBranchName("", "claude/", "issue", 123);
+
+      expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
+    });
+
+    it("should use default format when template is undefined", () => {
+      const result = generateBranchName(undefined, "claude/", "pr", 456);
+
+      expect(result).toMatch(/^claude\/pr-456-\d{8}-\d{4}$/);
+    });
+
+    it("should preserve custom template formatting (no automatic lowercase/truncation)", () => {
+      const template = "{{prefix}}UPPERCASE_Branch-Name_{{entityNumber}}";
+      const result = generateBranchName(template, "Feature/", "issue", 123);
+
+      expect(result).toBe("Feature/UPPERCASE_Branch-Name_123");
+    });
+
+    it("should not truncate custom template results", () => {
+      const template =
+        "{{prefix}}very-long-branch-name-that-exceeds-the-maximum-allowed-length-{{entityNumber}}";
+      const result = generateBranchName(template, "feature/", "issue", 123);
+
+      expect(result).toBe(
+        "feature/very-long-branch-name-that-exceeds-the-maximum-allowed-length-123",
+      );
+    });
+
+    it("should apply Kubernetes-compatible transformations to default template only", () => {
+      const result = generateBranchName(undefined, "Feature/", "issue", 123);
+
+      expect(result).toMatch(/^feature\/issue-123-\d{8}-\d{4}$/);
+      expect(result.length).toBeLessThanOrEqual(50);
+    });
+
+    it("should handle SHA in template", () => {
+      const template = "{{prefix}}{{entityType}}-{{entityNumber}}-{{sha}}";
+      const result = generateBranchName(
+        template,
+        "fix/",
+        "pr",
+        789,
+        "abcdef123456",
+      );
+
+      expect(result).toBe("fix/pr-789-abcdef12");
+    });
+
+    it("should use label in template when provided", () => {
+      const template = "{{prefix}}{{label}}/{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "feature/",
+        "issue",
+        123,
+        undefined,
+        "bug",
+      );
+
+      expect(result).toBe("feature/bug/123");
+    });
+
+    it("should fallback to entityType when label template is used but no label provided", () => {
+      const template = "{{prefix}}{{label}}-{{entityNumber}}";
+      const result = generateBranchName(template, "fix/", "pr", 456);

+      expect(result).toBe("fix/pr-456");
+    });
+
+    it("should handle template with both label and entityType", () => {
+      const template = "{{prefix}}{{label}}-{{entityType}}_{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "dev/",
+        "issue",
+        789,
+        undefined,
+        "enhancement",
+      );
+
+      expect(result).toBe("dev/enhancement-issue_789");
+    });
+
+    it("should use description in template when provided", () => {
+      const template = "{{prefix}}{{description}}/{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "feature/",
+        "issue",
+        123,
+        undefined,
+        undefined,
+        "Fix login bug with OAuth",
+      );
+
+      expect(result).toBe("feature/fix-login-bug-with-oauth/123");
+    });
+
+    it("should handle template with multiple variables including description", () => {
+      const template =
+        "{{prefix}}{{label}}/{{description}}-{{entityType}}_{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "dev/",
+        "issue",
+        456,
+        undefined,
+        "bug",
+        "User authentication fails completely",
+      );
+
+      expect(result).toBe(
+        "dev/bug/user-authentication-fails-completely-issue_456",
+      );
+    });
+
+    it("should handle description with special characters in template", () => {
+      const template = "{{prefix}}{{description}}-{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "fix/",
+        "pr",
+        789,
+        undefined,
+        undefined,
+        "Add: User Registration & Email Validation",
+      );
+
+      expect(result).toBe("fix/add-user-registration-email-789");
+    });
+
+    it("should truncate descriptions to exactly 5 words", () => {
+      const result = generateBranchName(
+        "{{prefix}}{{description}}/{{entityNumber}}",
+        "feature/",
+        "issue",
+        999,
+        undefined,
+        undefined,
+        "This is a very long title with many more than five words in it",
+      );
+      expect(result).toBe("feature/this-is-a-very-long/999");
+    });
+
+    it("should handle empty description in template", () => {
+      const template = "{{prefix}}{{description}}-{{entityNumber}}";
+      const result = generateBranchName(
+        template,
+        "test/",
+        "issue",
+        101,
+        undefined,
+        undefined,
+        "",
+      );
+
+      expect(result).toBe("test/-101");
+    });
+
+    it("should fallback to default format when template produces empty result", () => {
+      const template = "{{description}}"; // Will be empty if no title provided
+      const result = generateBranchName(template, "claude/", "issue", 123);
+
+      expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
+      expect(result.length).toBeLessThanOrEqual(50);
+    });
+
+    it("should fallback to default format when template produces only whitespace", () => {
+      const template = " {{description}} "; // Will be " " if description is empty
+      const result = generateBranchName(
+        template,
+        "fix/",
+        "pr",
+        456,
+        undefined,
+        undefined,
+        "",
+      );
+
+      expect(result).toMatch(/^fix\/pr-456-\d{8}-\d{4}$/);
+      expect(result.length).toBeLessThanOrEqual(50);
+    });
+  });
+});
@@ -61,6 +61,7 @@ describe("generatePrompt", () => {
       body: "This is a test PR",
       author: { login: "testuser" },
       state: "OPEN",
+      labels: { nodes: [] },
       createdAt: "2023-01-01T00:00:00Z",
       additions: 15,
       deletions: 5,
@@ -475,6 +476,7 @@ describe("generatePrompt", () => {
       body: "The login form is not working",
       author: { login: "testuser" },
       state: "OPEN",
+      labels: { nodes: [] },
       createdAt: "2023-01-01T00:00:00Z",
       comments: {
         nodes: [],
@@ -1,6 +1,7 @@
 import { describe, expect, it, jest } from "bun:test";
 import {
   extractTriggerTimestamp,
+  extractOriginalTitle,
   fetchGitHubData,
   filterCommentsToTriggerTime,
   filterReviewsToTriggerTime,
@@ -9,6 +10,7 @@ import {
 import {
   createMockContext,
   mockIssueCommentContext,
+  mockPullRequestCommentContext,
   mockPullRequestReviewContext,
   mockPullRequestReviewCommentContext,
   mockPullRequestOpenedContext,
@@ -63,6 +65,47 @@ describe("extractTriggerTimestamp", () => {
   });
 });

+describe("extractOriginalTitle", () => {
+  it("should extract title from IssueCommentEvent on PR", () => {
+    const title = extractOriginalTitle(mockPullRequestCommentContext);
+    expect(title).toBe("Fix: Memory leak in user service");
+  });
+
+  it("should extract title from PullRequestReviewEvent", () => {
+    const title = extractOriginalTitle(mockPullRequestReviewContext);
+    expect(title).toBe("Refactor: Improve error handling in API layer");
+  });
+
+  it("should extract title from PullRequestReviewCommentEvent", () => {
+    const title = extractOriginalTitle(mockPullRequestReviewCommentContext);
+    expect(title).toBe("Performance: Optimize search algorithm");
+  });
+
+  it("should extract title from pull_request event", () => {
+    const title = extractOriginalTitle(mockPullRequestOpenedContext);
+    expect(title).toBe("Feature: Add user authentication");
+  });
+
+  it("should extract title from issues event", () => {
+    const title = extractOriginalTitle(mockIssueOpenedContext);
+    expect(title).toBe("Bug: Application crashes on startup");
+  });
+
+  it("should return undefined for event without title", () => {
+    const context = createMockContext({
+      eventName: "issue_comment",
+      payload: {
+        comment: {
+          id: 123,
+          body: "test",
+        },
+      } as any,
+    });
+    const title = extractOriginalTitle(context);
+    expect(title).toBeUndefined();
+  });
+});
+
 describe("filterCommentsToTriggerTime", () => {
   const createMockComment = (
     createdAt: string,
@@ -945,4 +988,115 @@ describe("fetchGitHubData integration with time filtering", () => {
     );
     expect(hasPrBodyInMap).toBe(false);
   });
+
+  it("should use originalTitle when provided instead of fetched title", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Fetched Title From GraphQL",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+      originalTitle: "Original Title From Webhook",
+    });
+
+    expect(result.contextData.title).toBe("Original Title From Webhook");
+  });
+
+  it("should use fetched title when originalTitle is not provided", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Fetched Title From GraphQL",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+    });
+
+    expect(result.contextData.title).toBe("Fetched Title From GraphQL");
+  });
+
+  it("should use original title from webhook even if title was edited after trigger", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Edited Title (from GraphQL)",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            lastEditedAt: "2024-01-15T12:30:00Z", // Edited after trigger
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+      triggerTime: "2024-01-15T12:00:00Z",
+      originalTitle: "Original Title (from webhook at trigger time)",
+    });
+
+    expect(result.contextData.title).toBe(
+      "Original Title (from webhook at trigger time)",
+    );
+  });
 });
@@ -28,6 +28,9 @@ describe("formatContext", () => {
       additions: 50,
       deletions: 30,
       state: "OPEN",
+      labels: {
+        nodes: [],
+      },
       commits: {
         totalCount: 3,
         nodes: [],
@@ -63,6 +66,9 @@ Changed Files: 2 files`,
       author: { login: "test-user" },
       createdAt: "2023-01-01T00:00:00Z",
       state: "OPEN",
+      labels: {
+        nodes: [],
+      },
       comments: {
         nodes: [],
       },
77  test/extract-user-request.test.ts  Normal file
@@ -0,0 +1,77 @@
+import { describe, test, expect } from "bun:test";
+import { extractUserRequest } from "../src/utils/extract-user-request";
+
+describe("extractUserRequest", () => {
+  test("extracts text after @claude trigger", () => {
+    expect(extractUserRequest("@claude /review-pr", "@claude")).toBe(
+      "/review-pr",
+    );
+  });
+
+  test("extracts slash command with arguments", () => {
+    expect(
+      extractUserRequest(
+        "@claude /review-pr please check the auth module",
+        "@claude",
+      ),
+    ).toBe("/review-pr please check the auth module");
+  });
+
+  test("handles trigger phrase with extra whitespace", () => {
+    expect(extractUserRequest("@claude /review-pr", "@claude")).toBe(
+      "/review-pr",
+    );
+  });
+
+  test("handles trigger phrase at start of multiline comment", () => {
+    const comment = `@claude /review-pr
+Please review this PR carefully.
+Focus on security issues.`;
+    expect(extractUserRequest(comment, "@claude")).toBe(
+      `/review-pr
+Please review this PR carefully.
+Focus on security issues.`,
+    );
+  });
+
+  test("handles trigger phrase in middle of text", () => {
+    expect(
+      extractUserRequest("Hey team, @claude can you review this?", "@claude"),
+    ).toBe("can you review this?");
+  });
+
+  test("returns null for empty comment body", () => {
+    expect(extractUserRequest("", "@claude")).toBeNull();
+  });
+
+  test("returns null for undefined comment body", () => {
+    expect(extractUserRequest(undefined, "@claude")).toBeNull();
+  });
+
+  test("returns null when trigger phrase not found", () => {
+    expect(extractUserRequest("Please review this PR", "@claude")).toBeNull();
+  });
+
+  test("returns null when only trigger phrase with no request", () => {
+    expect(extractUserRequest("@claude", "@claude")).toBeNull();
+  });
+
+  test("handles custom trigger phrase", () => {
+    expect(extractUserRequest("/claude help me", "/claude")).toBe("help me");
+  });
+
+  test("handles trigger phrase with special regex characters", () => {
+    expect(
+      extractUserRequest("@claude[bot] do something", "@claude[bot]"),
+    ).toBe("do something");
+  });
+
+  test("is case insensitive", () => {
+    expect(extractUserRequest("@CLAUDE /review-pr", "@claude")).toBe(
+      "/review-pr",
+    );
+    expect(extractUserRequest("@Claude /review-pr", "@claude")).toBe(
+      "/review-pr",
+    );
+  });
+});
214  test/github-file-ops-path-validation.test.ts  Normal file
@@ -0,0 +1,214 @@
+import { describe, expect, it, beforeAll, afterAll } from "bun:test";
+import { validatePathWithinRepo } from "../src/mcp/path-validation";
+import { resolve } from "path";
+import { mkdir, writeFile, symlink, rm, realpath } from "fs/promises";
+import { tmpdir } from "os";
+
+describe("validatePathWithinRepo", () => {
+  // Use a real temp directory for tests that need filesystem access
+  let testDir: string;
+  let repoRoot: string;
+  let outsideDir: string;
+  // Real paths after symlink resolution (e.g., /tmp -> /private/tmp on macOS)
+  let realRepoRoot: string;
+
+  beforeAll(async () => {
+    // Create test directory structure
+    testDir = resolve(tmpdir(), `path-validation-test-${Date.now()}`);
+    repoRoot = resolve(testDir, "repo");
+    outsideDir = resolve(testDir, "outside");
+
+    await mkdir(repoRoot, { recursive: true });
+    await mkdir(resolve(repoRoot, "src"), { recursive: true });
+    await mkdir(outsideDir, { recursive: true });
+
+    // Create test files
+    await writeFile(resolve(repoRoot, "file.txt"), "inside repo");
+    await writeFile(resolve(repoRoot, "src", "main.js"), "console.log('hi')");
+    await writeFile(resolve(outsideDir, "secret.txt"), "sensitive data");
+
+    // Get real paths after symlink resolution
+    realRepoRoot = await realpath(repoRoot);
+  });
+
+  afterAll(async () => {
+    // Cleanup
+    await rm(testDir, { recursive: true, force: true });
+  });
+
+  describe("valid paths", () => {
+    it("should accept simple relative paths", async () => {
+      const result = await validatePathWithinRepo("file.txt", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
+    });
+
+    it("should accept nested relative paths", async () => {
+      const result = await validatePathWithinRepo("src/main.js", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
+    });
+
+    it("should accept paths with single dot segments", async () => {
+      const result = await validatePathWithinRepo("./src/main.js", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
+    });
+
+    it("should accept paths that use .. but resolve inside repo", async () => {
+      // src/../file.txt resolves to file.txt which is still inside repo
+      const result = await validatePathWithinRepo("src/../file.txt", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
+    });
+
+    it("should accept absolute paths within the repo root", async () => {
+      const absolutePath = resolve(repoRoot, "file.txt");
+      const result = await validatePathWithinRepo(absolutePath, repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
+    });
+
+    it("should accept the repo root itself", async () => {
+      const result = await validatePathWithinRepo(".", repoRoot);
+      expect(result).toBe(realRepoRoot);
+    });
+
+    it("should handle new files (non-existent) in valid directories", async () => {
+      const result = await validatePathWithinRepo("src/newfile.js", repoRoot);
+      // For non-existent files, we validate the parent but return the initial path
+      // (can't realpath a file that doesn't exist yet)
+      expect(result).toBe(resolve(repoRoot, "src/newfile.js"));
+    });
+  });
+
+  describe("path traversal attacks", () => {
+    it("should reject simple parent directory traversal", async () => {
+      await expect(
+        validatePathWithinRepo("../outside/secret.txt", repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+
+    it("should reject deeply nested parent directory traversal", async () => {
+      await expect(
+        validatePathWithinRepo("../../../etc/passwd", repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+
+    it("should reject traversal hidden within path", async () => {
+      await expect(
+        validatePathWithinRepo("src/../../outside/secret.txt", repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+
+    it("should reject traversal at the end of path", async () => {
+      await expect(
+        validatePathWithinRepo("src/../..", repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+
+    it("should reject absolute paths outside the repo root", async () => {
+      await expect(
+        validatePathWithinRepo("/etc/passwd", repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+
+    it("should reject absolute paths to sibling directories", async () => {
+      await expect(
+        validatePathWithinRepo(resolve(outsideDir, "secret.txt"), repoRoot),
+      ).rejects.toThrow(/resolves outside the repository root/);
+    });
+  });
+
+  describe("symlink attacks", () => {
+    it("should reject symlinks pointing outside the repo", async () => {
+      // Create a symlink inside the repo that points to a file outside
+      const symlinkPath = resolve(repoRoot, "evil-link");
+      await symlink(resolve(outsideDir, "secret.txt"), symlinkPath);
+
+      try {
+        // The symlink path looks like it's inside the repo, but points outside
+        await expect(
+          validatePathWithinRepo("evil-link", repoRoot),
+        ).rejects.toThrow(/resolves outside the repository root/);
+      } finally {
+        await rm(symlinkPath, { force: true });
+      }
+    });
+
+    it("should reject symlinks to parent directories", async () => {
+      // Create a symlink to the parent directory
+      const symlinkPath = resolve(repoRoot, "parent-link");
+      await symlink(testDir, symlinkPath);
+
+      try {
+        await expect(
+          validatePathWithinRepo("parent-link/outside/secret.txt", repoRoot),
+        ).rejects.toThrow(/resolves outside the repository root/);
+      } finally {
+        await rm(symlinkPath, { force: true });
+      }
+    });
+
+    it("should accept symlinks that resolve within the repo", async () => {
+      // Create a symlink inside the repo that points to another file inside
+      const symlinkPath = resolve(repoRoot, "good-link");
+      await symlink(resolve(repoRoot, "file.txt"), symlinkPath);
+
+      try {
+        const result = await validatePathWithinRepo("good-link", repoRoot);
+        // Should resolve to the actual file location
+        expect(result).toBe(resolve(realRepoRoot, "file.txt"));
+      } finally {
+        await rm(symlinkPath, { force: true });
+      }
+    });
+
+    it("should reject directory symlinks that escape the repo", async () => {
+      // Create a symlink to outside directory
+      const symlinkPath = resolve(repoRoot, "escape-dir");
+      await symlink(outsideDir, symlinkPath);
+
+      try {
+        await expect(
+          validatePathWithinRepo("escape-dir/secret.txt", repoRoot),
+        ).rejects.toThrow(/resolves outside the repository root/);
+      } finally {
+        await rm(symlinkPath, { force: true });
+      }
+    });
+  });
+
+  describe("edge cases", () => {
+    it("should handle empty path (current directory)", async () => {
+      const result = await validatePathWithinRepo("", repoRoot);
+      expect(result).toBe(realRepoRoot);
+    });
+
+    it("should handle paths with multiple consecutive slashes", async () => {
+      const result = await validatePathWithinRepo("src//main.js", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
+    });
+
+    it("should handle paths with trailing slashes", async () => {
+      const result = await validatePathWithinRepo("src/", repoRoot);
+      expect(result).toBe(resolve(realRepoRoot, "src"));
+    });
+
+    it("should reject prefix attack (repo root as prefix but not parent)", async () => {
+      // Create a sibling directory with repo name as prefix
+      const evilDir = repoRoot + "-evil";
+      await mkdir(evilDir, { recursive: true });
+      await writeFile(resolve(evilDir, "file.txt"), "evil");
+
+      try {
+        await expect(
+          validatePathWithinRepo(resolve(evilDir, "file.txt"), repoRoot),
+        ).rejects.toThrow(/resolves outside the repository root/);
+      } finally {
+        await rm(evilDir, { recursive: true, force: true });
+      }
+    });
+
+    it("should throw error for non-existent repo root", async () => {
+      await expect(
+        validatePathWithinRepo("file.txt", "/nonexistent/repo"),
+      ).rejects.toThrow(/does not exist/);
+    });
+  });
+});
@@ -32,6 +32,7 @@ describe("prepareMcpConfig", () => {
     branchPrefix: "",
     useStickyComment: false,
     useCommitSigning: false,
+    sshSigningKey: "",
     botId: String(CLAUDE_APP_BOT_ID),
    botName: CLAUDE_BOT_LOGIN,
     allowedBots: "",
@@ -20,6 +20,7 @@ const defaultInputs = {
   branchPrefix: "claude/",
   useStickyComment: false,
   useCommitSigning: false,
+  sshSigningKey: "",
   botId: String(CLAUDE_APP_BOT_ID),
   botName: CLAUDE_BOT_LOGIN,
   allowedBots: "",
@@ -20,6 +20,7 @@ describe("detectMode with enhanced routing", () => {
   branchPrefix: "claude/",
   useStickyComment: false,
   useCommitSigning: false,
+  sshSigningKey: "",
   botId: "123456",
   botName: "claude-bot",
   allowedBots: "",
@@ -68,6 +68,7 @@ describe("checkWritePermissions", () => {
   branchPrefix: "claude/",
   useStickyComment: false,
   useCommitSigning: false,
+  sshSigningKey: "",
   botId: String(CLAUDE_APP_BOT_ID),
   botName: CLAUDE_BOT_LOGIN,
   allowedBots: "",
@@ -87,6 +87,7 @@ describe("pull_request_target event support", () => {
       },
       comments: { nodes: [] },
       reviews: { nodes: [] },
+      labels: { nodes: [] },
     },
     comments: [],
     changedFiles: [],
250
test/ssh-signing.test.ts
Normal file
250
test/ssh-signing.test.ts
Normal file
@@ -0,0 +1,250 @@
#!/usr/bin/env bun

import {
  describe,
  test,
  expect,
  afterEach,
  beforeAll,
  afterAll,
} from "bun:test";
import { mkdir, writeFile, rm, readFile, stat } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";

describe("SSH Signing", () => {
  // Use a temp directory for tests
  const testTmpDir = join(tmpdir(), "claude-ssh-signing-test");
  const testSshDir = join(testTmpDir, ".ssh");
  const testKeyPath = join(testSshDir, "claude_signing_key");
  const testKey =
    "-----BEGIN OPENSSH PRIVATE KEY-----\ntest-key-content\n-----END OPENSSH PRIVATE KEY-----";

  beforeAll(async () => {
    await mkdir(testTmpDir, { recursive: true });
  });

  afterAll(async () => {
    await rm(testTmpDir, { recursive: true, force: true });
  });

  afterEach(async () => {
    // Clean up test key if it exists
    try {
      await rm(testKeyPath, { force: true });
    } catch {
      // Ignore cleanup errors
    }
  });

  describe("setupSshSigning file operations", () => {
    test("should write key file atomically with correct permissions", async () => {
      // Create the directory with secure permissions (same as setupSshSigning does)
      await mkdir(testSshDir, { recursive: true, mode: 0o700 });

      // Write key atomically with proper permissions (same as setupSshSigning does)
      await writeFile(testKeyPath, testKey, { mode: 0o600 });

      // Verify key was written
      const keyContent = await readFile(testKeyPath, "utf-8");
      expect(keyContent).toBe(testKey);

      // Verify permissions (0o600 = 384 in decimal for permission bits only)
      const stats = await stat(testKeyPath);
      const permissions = stats.mode & 0o777; // Get only permission bits
      expect(permissions).toBe(0o600);
    });

    test("should create .ssh directory with secure permissions", async () => {
      // Clean up first
      await rm(testSshDir, { recursive: true, force: true });

      // Create directory with secure permissions (same as setupSshSigning does)
      await mkdir(testSshDir, { recursive: true, mode: 0o700 });

      // Verify directory exists
      const dirStats = await stat(testSshDir);
      expect(dirStats.isDirectory()).toBe(true);

      // Verify directory permissions
      const dirPermissions = dirStats.mode & 0o777;
      expect(dirPermissions).toBe(0o700);
    });
  });

  describe("setupSshSigning validation", () => {
    test("should reject empty SSH key", () => {
      const emptyKey = "";
      expect(() => {
        if (!emptyKey.trim()) {
          throw new Error("SSH signing key cannot be empty");
        }
      }).toThrow("SSH signing key cannot be empty");
    });

    test("should reject whitespace-only SSH key", () => {
      const whitespaceKey = " \n\t ";
      expect(() => {
        if (!whitespaceKey.trim()) {
          throw new Error("SSH signing key cannot be empty");
        }
      }).toThrow("SSH signing key cannot be empty");
    });

    test("should reject invalid SSH key format", () => {
      const invalidKey = "not a valid key";
      expect(() => {
        if (
          !invalidKey.includes("BEGIN") ||
          !invalidKey.includes("PRIVATE KEY")
        ) {
          throw new Error("Invalid SSH private key format");
        }
      }).toThrow("Invalid SSH private key format");
    });

    test("should accept valid SSH key format", () => {
      const validKey =
        "-----BEGIN OPENSSH PRIVATE KEY-----\nkey-content\n-----END OPENSSH PRIVATE KEY-----";
      expect(() => {
        if (!validKey.trim()) {
          throw new Error("SSH signing key cannot be empty");
        }
        if (!validKey.includes("BEGIN") || !validKey.includes("PRIVATE KEY")) {
          throw new Error("Invalid SSH private key format");
        }
      }).not.toThrow();
    });
  });

  describe("cleanupSshSigning file operations", () => {
    test("should remove the signing key file", async () => {
      // Create the key file first
      await mkdir(testSshDir, { recursive: true });
      await writeFile(testKeyPath, testKey, { mode: 0o600 });

      // Verify it exists
      const existsBefore = await stat(testKeyPath)
        .then(() => true)
        .catch(() => false);
      expect(existsBefore).toBe(true);

      // Clean up (same operation as cleanupSshSigning)
      await rm(testKeyPath, { force: true });

      // Verify it's gone
      const existsAfter = await stat(testKeyPath)
        .then(() => true)
        .catch(() => false);
      expect(existsAfter).toBe(false);
    });

    test("should not throw if key file does not exist", async () => {
      // Make sure file doesn't exist
      await rm(testKeyPath, { force: true });

      // Should not throw (rm with force: true doesn't throw on missing files)
      await expect(rm(testKeyPath, { force: true })).resolves.toBeUndefined();
    });
  });
});
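The setupSshSigning and cleanupSshSigning helpers that these tests mirror are not shown in this comparison. The sketch below is one plausible shape for them, assuming the key is written to ~/.ssh/claude_signing_key on the runner and git is configured globally; the action's real file locations and git wiring may differ.

// Illustrative sketch only; not the implementation from this diff.
import { mkdir, writeFile, rm } from "fs/promises";
import { join } from "path";
import { homedir } from "os";
import { execFileSync } from "child_process";

const sshDir = join(homedir(), ".ssh");
const keyPath = join(sshDir, "claude_signing_key");

export async function setupSshSigning(sshSigningKey: string): Promise<void> {
  // Validation the tests above exercise: non-empty, PEM-looking private key.
  if (!sshSigningKey.trim()) {
    throw new Error("SSH signing key cannot be empty");
  }
  if (
    !sshSigningKey.includes("BEGIN") ||
    !sshSigningKey.includes("PRIVATE KEY")
  ) {
    throw new Error("Invalid SSH private key format");
  }

  // Directory 0o700 and key file 0o600, matching the permissions asserted above.
  await mkdir(sshDir, { recursive: true, mode: 0o700 });
  await writeFile(keyPath, sshSigningKey, { mode: 0o600 });

  // Standard git settings for SSH-format commit signing (assumed wiring).
  execFileSync("git", ["config", "--global", "gpg.format", "ssh"]);
  execFileSync("git", ["config", "--global", "user.signingkey", keyPath]);
  execFileSync("git", ["config", "--global", "commit.gpgsign", "true"]);
}

export async function cleanupSshSigning(): Promise<void> {
  // force: true so a missing key file is not an error, as the last test checks.
  await rm(keyPath, { force: true });
}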

describe("SSH Signing Mode Detection", () => {
  test("sshSigningKey should take precedence over useCommitSigning", () => {
    // When both are set, SSH signing takes precedence
    const sshSigningKey = "test-key";
    const useCommitSigning = true;

    const useSshSigning = !!sshSigningKey;
    const useApiCommitSigning = useCommitSigning && !useSshSigning;

    expect(useSshSigning).toBe(true);
    expect(useApiCommitSigning).toBe(false);
  });

  test("useCommitSigning should work when sshSigningKey is not set", () => {
    const sshSigningKey = "";
    const useCommitSigning = true;

    const useSshSigning = !!sshSigningKey;
    const useApiCommitSigning = useCommitSigning && !useSshSigning;

    expect(useSshSigning).toBe(false);
    expect(useApiCommitSigning).toBe(true);
  });

  test("neither signing method when both are false/empty", () => {
    const sshSigningKey = "";
    const useCommitSigning = false;

    const useSshSigning = !!sshSigningKey;
    const useApiCommitSigning = useCommitSigning && !useSshSigning;

    expect(useSshSigning).toBe(false);
    expect(useApiCommitSigning).toBe(false);
  });

  test("git CLI tools should be used when sshSigningKey is set", () => {
    // This tests the logic in tag mode for tool selection
    const sshSigningKey = "test-key";
    const useCommitSigning = true; // Even if this is true

    const useSshSigning = !!sshSigningKey;
    const useApiCommitSigning = useCommitSigning && !useSshSigning;

    // When SSH signing is used, we should use git CLI (not API)
    const shouldUseGitCli = !useApiCommitSigning;
    expect(shouldUseGitCli).toBe(true);
  });

  test("MCP file ops should only be used with API commit signing", () => {
    // Case 1: API commit signing
    {
      const sshSigningKey = "";
      const useCommitSigning = true;

      const useSshSigning = !!sshSigningKey;
      const useApiCommitSigning = useCommitSigning && !useSshSigning;

      expect(useApiCommitSigning).toBe(true);
    }

    // Case 2: SSH signing (should NOT use API)
    {
      const sshSigningKey = "test-key";
      const useCommitSigning = true;

      const useSshSigning = !!sshSigningKey;
      const useApiCommitSigning = useCommitSigning && !useSshSigning;

      expect(useApiCommitSigning).toBe(false);
    }

    // Case 3: No signing (should NOT use API)
    {
      const sshSigningKey = "";
      const useCommitSigning = false;

      const useSshSigning = !!sshSigningKey;
      const useApiCommitSigning = useCommitSigning && !useSshSigning;

      expect(useApiCommitSigning).toBe(false);
    }
  });
});
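The five cases above reduce to one precedence rule: an SSH key selects git-CLI SSH signing, use_commit_signing alone selects API commit signing (and with it the MCP file-ops tools), and otherwise no signing is configured. A minimal sketch of that rule as a helper follows; resolveSigningMode and SigningMode are illustrative names, not taken from the codebase.

// Hypothetical consolidation of the precedence logic the tests assert.
type SigningMode = "ssh" | "api" | "none";

export function resolveSigningMode(
  sshSigningKey: string,
  useCommitSigning: boolean,
): SigningMode {
  if (sshSigningKey) return "ssh"; // git CLI with SSH-signed commits
  if (useCommitSigning) return "api"; // GitHub API commit signing + MCP file ops
  return "none"; // plain git CLI, unsigned commits
}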

describe("Context parsing", () => {
  test("sshSigningKey should be parsed from environment", () => {
    // Test that context.ts parses SSH_SIGNING_KEY correctly
    const testCases = [
      { env: "test-key", expected: "test-key" },
      { env: "", expected: "" },
      { env: undefined, expected: "" },
    ];

    for (const { env, expected } of testCases) {
      const result = env || "";
      expect(result).toBe(expected);
    }
  });
});
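The context.ts parsing referenced here is not part of this excerpt; based on these cases it presumably amounts to reading SSH_SIGNING_KEY with an empty-string fallback, roughly:

// Assumed shape of the parsing: unset or empty SSH_SIGNING_KEY becomes "",
// so !!sshSigningKey cleanly drives the mode detection tested above.
const sshSigningKey: string = process.env.SSH_SIGNING_KEY || "";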