Mirror of https://github.com/anthropics/claude-code-action.git
Synced 2026-01-23 23:14:13 +08:00

Compare commits: v1.0.29 ... boris/add- (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 91a8d6c8d8 |  |
|  | 8151408b90 |  |

action.yml (21 lines changed)
@@ -23,10 +23,6 @@ inputs:
    description: "The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format)"
    required: false
    default: "claude/"
  branch_name_template:
    description: "Template for branch naming. Available variables: {{prefix}}, {{entityType}}, {{entityNumber}}, {{timestamp}}, {{sha}}, {{label}}, {{description}}. {{label}} will be the first label from the issue/PR, or {{entityType}} as a fallback. {{description}} will be the first 5 words of the issue/PR title in kebab-case. Default: '{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}'"
    required: false
    default: ""
  allowed_bots:
    description: "Comma-separated list of allowed bot usernames, or '*' to allow all bots. Empty string (default) allows no bots."
    required: false
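As a rough illustration of how the `branch_name_template` variables expand (a minimal sketch that mirrors the `applyBranchTemplate` helper appearing later in this diff; the sample values below are hypothetical, not taken from a real event):

```typescript
// Minimal sketch of {{variable}} substitution for branch_name_template.
// Sample values are hypothetical; the real action derives them from the triggering event.
type Vars = Record<string, string | number | undefined>;

function applyTemplate(template: string, vars: Vars): string {
  let result = template;
  for (const [key, value] of Object.entries(vars)) {
    result = result.replaceAll(`{{${key}}}`, value ? String(value) : "");
  }
  return result;
}

// Default template: '{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}'
console.log(
  applyTemplate("{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}", {
    prefix: "claude/",
    entityType: "issue",
    entityNumber: 123,
    timestamp: "20260123-1512",
  }),
); // -> "claude/issue-123-20260123-1512"
```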
@@ -85,10 +81,6 @@ inputs:
    description: "Enable commit signing using GitHub's commit signature verification. When false, Claude uses standard git commands"
    required: false
    default: "false"
  ssh_signing_key:
    description: "SSH private key for signing commits. When provided, git will be configured to use SSH signing. Takes precedence over use_commit_signing."
    required: false
    default: ""
  bot_id:
    description: "GitHub user ID to use for git operations (defaults to Claude's bot ID)"
    required: false
@@ -182,7 +174,6 @@ runs:
        LABEL_TRIGGER: ${{ inputs.label_trigger }}
        BASE_BRANCH: ${{ inputs.base_branch }}
        BRANCH_PREFIX: ${{ inputs.branch_prefix }}
        BRANCH_NAME_TEMPLATE: ${{ inputs.branch_name_template }}
        OVERRIDE_GITHUB_TOKEN: ${{ inputs.github_token }}
        ALLOWED_BOTS: ${{ inputs.allowed_bots }}
        ALLOWED_NON_WRITE_USERS: ${{ inputs.allowed_non_write_users }}
@@ -190,7 +181,6 @@ runs:
        USE_STICKY_COMMENT: ${{ inputs.use_sticky_comment }}
        DEFAULT_WORKFLOW_TOKEN: ${{ github.token }}
        USE_COMMIT_SIGNING: ${{ inputs.use_commit_signing }}
        SSH_SIGNING_KEY: ${{ inputs.ssh_signing_key }}
        BOT_ID: ${{ inputs.bot_id }}
        BOT_NAME: ${{ inputs.bot_name }}
        TRACK_PROGRESS: ${{ inputs.track_progress }}
@@ -213,13 +203,12 @@ runs:

          # Install Claude Code if no custom executable is provided
          if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
            CLAUDE_CODE_VERSION="2.1.1"
            CLAUDE_CODE_VERSION="2.0.76"
            echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
            for attempt in 1 2 3; do
              echo "Installation attempt $attempt..."
              if command -v timeout &> /dev/null; then
                # Use --foreground to kill entire process group on timeout, --kill-after to send SIGKILL if SIGTERM fails
                timeout --foreground --kill-after=10 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
                timeout 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
              else
                curl -fsSL https://claude.ai/install.sh | bash -s -- "$CLAUDE_CODE_VERSION" && break
              fi
@@ -345,12 +334,6 @@ runs:
            echo '```' >> $GITHUB_STEP_SUMMARY
          fi

      - name: Cleanup SSH signing key
        if: always() && inputs.ssh_signing_key != ''
        shell: bash
        run: |
          bun run ${GITHUB_ACTION_PATH}/src/entrypoints/cleanup-ssh-signing.ts

      - name: Revoke app token
        if: always() && inputs.github_token == '' && steps.prepare.outputs.skipped_due_to_workflow_validation_mismatch != 'true'
        shell: bash
@@ -124,13 +124,12 @@
        PATH_TO_CLAUDE_CODE_EXECUTABLE: ${{ inputs.path_to_claude_code_executable }}
      run: |
        if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
          CLAUDE_CODE_VERSION="2.1.1"
          CLAUDE_CODE_VERSION="2.0.76"
          echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
          for attempt in 1 2 3; do
            echo "Installation attempt $attempt..."
            if command -v timeout &> /dev/null; then
              # Use --foreground to kill entire process group on timeout, --kill-after to send SIGKILL if SIGTERM fails
              timeout --foreground --kill-after=10 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
              timeout 120 bash -c "curl -fsSL https://claude.ai/install.sh | bash -s -- $CLAUDE_CODE_VERSION" && break
            else
              curl -fsSL https://claude.ai/install.sh | bash -s -- "$CLAUDE_CODE_VERSION" && break
            fi
@@ -6,7 +6,7 @@
  "name": "@anthropic-ai/claude-code-base-action",
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@anthropic-ai/claude-agent-sdk": "^0.2.1",
    "@anthropic-ai/claude-agent-sdk": "^0.1.76",
    "shell-quote": "^1.8.3",
  },
  "devDependencies": {
@@ -27,7 +27,7 @@
|
||||
|
||||
"@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
|
||||
|
||||
"@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.1", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-ZJO/TWcrFHGQTGHJDJl03mWozirWMBqdNpbuAgxZpLaHj2N5vyMxoeYiJC+7M0+gOSs7bjwKJLKTZcHGtGa34g=="],
|
||||
"@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
|
||||
|
||||
"@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
  },
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@anthropic-ai/claude-agent-sdk": "^0.2.1",
    "@anthropic-ai/claude-agent-sdk": "^0.1.76",
    "shell-quote": "^1.8.3"
  },
  "devDependencies": {
@@ -8,47 +8,26 @@ const MARKETPLACE_URL_REGEX =
  /^https:\/\/[a-zA-Z0-9\-._~:/?#[\]@!$&'()*+,;=%]+\.git$/;

/**
 * Checks if a marketplace input is a local path (not a URL)
 * @param input - The marketplace input to check
 * @returns true if the input is a local path, false if it's a URL
 * Validates a marketplace URL for security issues
 * @param url - The marketplace URL to validate
 * @throws {Error} If the URL is invalid
 */
function isLocalPath(input: string): boolean {
  // Local paths start with ./, ../, /, or a drive letter (Windows)
  return (
    input.startsWith("./") ||
    input.startsWith("../") ||
    input.startsWith("/") ||
    /^[a-zA-Z]:[\\\/]/.test(input)
  );
}

/**
 * Validates a marketplace URL or local path
 * @param input - The marketplace URL or local path to validate
 * @throws {Error} If the input is invalid
 */
function validateMarketplaceInput(input: string): void {
  const normalized = input.trim();
function validateMarketplaceUrl(url: string): void {
  const normalized = url.trim();

  if (!normalized) {
    throw new Error("Marketplace URL or path cannot be empty");
    throw new Error("Marketplace URL cannot be empty");
  }

  // Local paths are passed directly to Claude Code which handles them
  if (isLocalPath(normalized)) {
    return;
  }

  // Validate as URL
  if (!MARKETPLACE_URL_REGEX.test(normalized)) {
    throw new Error(`Invalid marketplace URL format: ${input}`);
    throw new Error(`Invalid marketplace URL format: ${url}`);
  }

  // Additional check for valid URL structure
  try {
    new URL(normalized);
  } catch {
    throw new Error(`Invalid marketplace URL: ${input}`);
    throw new Error(`Invalid marketplace URL: ${url}`);
  }
}

@@ -76,9 +55,9 @@ function validatePluginName(pluginName: string): void {
}

/**
 * Parse a newline-separated list of marketplace URLs or local paths and return an array of validated entries
 * @param marketplaces - Newline-separated list of marketplace Git URLs or local paths
 * @returns Array of validated marketplace URLs or paths (empty array if none provided)
 * Parse a newline-separated list of marketplace URLs and return an array of validated URLs
 * @param marketplaces - Newline-separated list of marketplace Git URLs
 * @returns Array of validated marketplace URLs (empty array if none provided)
 */
function parseMarketplaces(marketplaces?: string): string[] {
  const trimmed = marketplaces?.trim();
@@ -87,14 +66,14 @@ function parseMarketplaces(marketplaces?: string): string[] {
    return [];
  }

  // Split by newline and process each entry
  // Split by newline and process each URL
  return trimmed
    .split("\n")
    .map((entry) => entry.trim())
    .filter((entry) => {
      if (entry.length === 0) return false;
    .map((url) => url.trim())
    .filter((url) => {
      if (url.length === 0) return false;

      validateMarketplaceInput(entry);
      validateMarketplaceUrl(url);
      return true;
    });
}
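A quick sketch of which marketplace inputs skip the `.git` URL check and are passed straight through to Claude Code (the helper below is a trimmed-down stand-in for the `isLocalPath` shown above, and the sample entries are illustrative):

```typescript
// Illustrative only: a trimmed-down version of the local-path check above,
// showing which marketplace inputs bypass URL validation.
function looksLikeLocalPath(input: string): boolean {
  return (
    input.startsWith("./") ||
    input.startsWith("../") ||
    input.startsWith("/") ||
    /^[a-zA-Z]:[\\/]/.test(input) // Windows drive letter, e.g. C:\
  );
}

for (const entry of [
  "./my-local-marketplace",             // local -> passed through to Claude Code
  "/home/user/my-marketplace",          // local (absolute Unix path)
  "C:\\Users\\user\\marketplace",       // local (Windows path)
  "https://github.com/user/remote.git", // remote -> must match the .git URL regex
  "not-a-url",                          // neither -> validation would throw
]) {
  console.log(entry, "->", looksLikeLocalPath(entry) ? "local path" : "URL check");
}
```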
@@ -184,26 +163,26 @@ async function installPlugin(
/**
 * Adds a Claude Code plugin marketplace
 * @param claudeExecutable - Path to the Claude executable
 * @param marketplace - The marketplace Git URL or local path to add
 * @param marketplaceUrl - The marketplace Git URL to add
 * @returns Promise that resolves when the marketplace add command completes
 * @throws {Error} If the command fails to execute
 */
async function addMarketplace(
  claudeExecutable: string,
  marketplace: string,
  marketplaceUrl: string,
): Promise<void> {
  console.log(`Adding marketplace: ${marketplace}`);
  console.log(`Adding marketplace: ${marketplaceUrl}`);

  return executeClaudeCommand(
    claudeExecutable,
    ["plugin", "marketplace", "add", marketplace],
    `Failed to add marketplace '${marketplace}'`,
    ["plugin", "marketplace", "add", marketplaceUrl],
    `Failed to add marketplace '${marketplaceUrl}'`,
  );
}

/**
 * Installs Claude Code plugins from a newline-separated list
 * @param marketplacesInput - Newline-separated list of marketplace Git URLs or local paths
 * @param marketplacesInput - Newline-separated list of marketplace Git URLs
 * @param pluginsInput - Newline-separated list of plugin names
 * @param claudeExecutable - Path to the Claude executable (defaults to "claude")
 * @returns Promise that resolves when all plugins are installed
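For context, a hedged sketch of how `installPlugins` would be called, based only on the parameter docs above (newline-separated marketplaces and plugin names; the exact export and default executable are assumptions):

```typescript
// Hypothetical call shape for installPlugins, inferred from the doc comments above.
// A mix of a local marketplace path and a remote .git URL, plus two plugin names.
await installPlugins(
  "./local-marketplace\nhttps://github.com/user/remote.git", // marketplacesInput
  "test-plugin\nanother-plugin",                              // pluginsInput
); // claudeExecutable defaults to "claude" per the docs
```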
@@ -212,8 +212,6 @@ export function parseSdkOptions(options: ClaudeOptions): ParsedSdkOptions {
  if (process.env.INPUT_ACTION_INPUTS_PRESENT) {
    env.GITHUB_ACTION_INPUTS = process.env.INPUT_ACTION_INPUTS_PRESENT;
  }
  // Ensure SDK path uses the same entrypoint as the CLI path
  env.CLAUDE_CODE_ENTRYPOINT = "claude-code-github-action";

  // Build system prompt option - default to claude_code preset
  let systemPrompt: SdkOptions["systemPrompt"];
@@ -1,81 +1,14 @@
|
||||
import * as core from "@actions/core";
|
||||
import { readFile, writeFile, access } from "fs/promises";
|
||||
import { dirname, join } from "path";
|
||||
import { readFile, writeFile } from "fs/promises";
|
||||
import { query } from "@anthropic-ai/claude-agent-sdk";
|
||||
import type {
|
||||
SDKMessage,
|
||||
SDKResultMessage,
|
||||
SDKUserMessage,
|
||||
} from "@anthropic-ai/claude-agent-sdk";
|
||||
import type { ParsedSdkOptions } from "./parse-sdk-options";
|
||||
|
||||
const EXECUTION_FILE = `${process.env.RUNNER_TEMP}/claude-execution-output.json`;
|
||||
|
||||
/** Filename for the user request file, written by prompt generation */
|
||||
const USER_REQUEST_FILENAME = "claude-user-request.txt";
|
||||
|
||||
/**
|
||||
* Check if a file exists
|
||||
*/
|
||||
async function fileExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await access(path);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a prompt configuration for the SDK.
|
||||
* If a user request file exists alongside the prompt file, returns a multi-block
|
||||
* SDKUserMessage that enables slash command processing in the CLI.
|
||||
* Otherwise, returns the prompt as a simple string.
|
||||
*/
|
||||
async function createPromptConfig(
|
||||
promptPath: string,
|
||||
showFullOutput: boolean,
|
||||
): Promise<string | AsyncIterable<SDKUserMessage>> {
|
||||
const promptContent = await readFile(promptPath, "utf-8");
|
||||
|
||||
// Check for user request file in the same directory
|
||||
const userRequestPath = join(dirname(promptPath), USER_REQUEST_FILENAME);
|
||||
const hasUserRequest = await fileExists(userRequestPath);
|
||||
|
||||
if (!hasUserRequest) {
|
||||
// No user request file - use simple string prompt
|
||||
return promptContent;
|
||||
}
|
||||
|
||||
// User request file exists - create multi-block message
|
||||
const userRequest = await readFile(userRequestPath, "utf-8");
|
||||
if (showFullOutput) {
|
||||
console.log("Using multi-block message with user request:", userRequest);
|
||||
} else {
|
||||
console.log("Using multi-block message with user request (content hidden)");
|
||||
}
|
||||
|
||||
// Create an async generator that yields a single multi-block message
|
||||
// The context/instructions go first, then the user's actual request last
|
||||
// This allows the CLI to detect and process slash commands in the user request
|
||||
async function* createMultiBlockMessage(): AsyncGenerator<SDKUserMessage> {
|
||||
yield {
|
||||
type: "user",
|
||||
session_id: "",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [
|
||||
{ type: "text", text: promptContent }, // Instructions + GitHub context
|
||||
{ type: "text", text: userRequest }, // User's request (may be a slash command)
|
||||
],
|
||||
},
|
||||
parent_tool_use_id: null,
|
||||
};
|
||||
}
|
||||
|
||||
return createMultiBlockMessage();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitizes SDK output to match CLI sanitization behavior
|
||||
*/
|
||||
@@ -130,8 +63,7 @@ export async function runClaudeWithSdk(
|
||||
promptPath: string,
|
||||
{ sdkOptions, showFullOutput, hasJsonSchema }: ParsedSdkOptions,
|
||||
): Promise<void> {
|
||||
// Create prompt configuration - may be a string or multi-block message
|
||||
const prompt = await createPromptConfig(promptPath, showFullOutput);
|
||||
const prompt = await readFile(promptPath, "utf-8");
|
||||
|
||||
if (!showFullOutput) {
|
||||
console.log(
|
||||
|
||||
@@ -596,111 +596,4 @@ describe("installPlugins", () => {
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
// Local marketplace path tests
|
||||
test("should accept local marketplace path with ./", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("./my-local-marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "./my-local-marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
"claude",
|
||||
["plugin", "install", "test-plugin"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept local marketplace path with absolute Unix path", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("/home/user/my-marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "/home/user/my-marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept local marketplace path with Windows absolute path", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("C:\\Users\\user\\marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "C:\\Users\\user\\marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept mixed local and remote marketplaces", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins(
|
||||
"./local-marketplace\nhttps://github.com/user/remote.git",
|
||||
"test-plugin",
|
||||
);
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(3);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "./local-marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "https://github.com/user/remote.git"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept local path with ../ (parent directory)", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("../shared-plugins/marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "../shared-plugins/marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept local path with nested directories", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("./plugins/my-org/my-marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "./plugins/my-org/my-marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
|
||||
test("should accept local path with dots in directory name", async () => {
|
||||
const spy = createMockSpawn();
|
||||
await installPlugins("./my.plugin.marketplace", "test-plugin");
|
||||
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"claude",
|
||||
["plugin", "marketplace", "add", "./my.plugin.marketplace"],
|
||||
{ stdio: "inherit" },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,6 @@
  "name": "mcp-test",
  "version": "1.0.0",
  "dependencies": {
    "@modelcontextprotocol/sdk": "^1.24.0"
    "@modelcontextprotocol/sdk": "^1.11.0"
  }
}
4
bun.lock
4
bun.lock
@@ -7,7 +7,7 @@
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.1",
|
||||
"@actions/github": "^6.0.1",
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.2.1",
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.1.76",
|
||||
"@modelcontextprotocol/sdk": "^1.11.0",
|
||||
"@octokit/graphql": "^8.2.2",
|
||||
"@octokit/rest": "^21.1.1",
|
||||
@@ -37,7 +37,7 @@
|
||||
|
||||
"@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
|
||||
|
||||
"@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.1", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-ZJO/TWcrFHGQTGHJDJl03mWozirWMBqdNpbuAgxZpLaHj2N5vyMxoeYiJC+7M0+gOSs7bjwKJLKTZcHGtGa34g=="],
|
||||
"@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
|
||||
|
||||
"@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ This action supports the following GitHub events ([learn more GitHub event trigg
- `issues` - When issues are opened or assigned
- `pull_request_review` - When PR reviews are submitted
- `pull_request_review_comment` - When comments are made on PR reviews
- `push` - When commits are pushed to a branch
- `repository_dispatch` - Custom events triggered via API
- `workflow_dispatch` - Manual workflow triggers (coming soon)

@@ -120,13 +121,42 @@ For more control over Claude's behavior, use the `claude_args` input to pass CLI
```

This provides full access to Claude Code CLI capabilities while maintaining the simplified action interface.

## Auto-Rebase PRs on Push

Automatically keep PRs up to date when the main branch is updated:

```yaml
name: Auto-Rebase PRs

on:
  push:
    branches: [main]

permissions:
  contents: write
  pull-requests: write
  id-token: write

jobs:
  rebase-prs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: anthropics/claude-code-action@v1
        with:
          prompt: |
            Find all open PRs that are behind main and merge main into them.
            For each PR:
            1. Check out the PR branch
            2. Merge main into the branch
            3. Push the updated branch

            Skip any PRs with merge conflicts - just report them.
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
```

This workflow triggers whenever commits are pushed to main and uses Claude to automatically merge main into any stale PR branches, keeping them up to date.
@@ -38,64 +38,7 @@ The following permissions are requested but not yet actively used. These will en

## Commit Signing

By default, commits made by Claude are unsigned. You can enable commit signing using one of two methods:

### Option 1: GitHub API Commit Signing (use_commit_signing)

This uses GitHub's API to create commits, which automatically signs them as verified from the GitHub App:

```yaml
- uses: anthropics/claude-code-action@main
  with:
    use_commit_signing: true
```

This is the simplest option and requires no additional setup. However, because it uses the GitHub API instead of the git CLI, it cannot perform complex git operations like rebasing, cherry-picking, or interactive history manipulation.

### Option 2: SSH Signing Key (ssh_signing_key)

This uses an SSH key to sign commits via the git CLI. Use this option when you need both signed commits AND standard git operations (rebasing, cherry-picking, etc.):

```yaml
- uses: anthropics/claude-code-action@main
  with:
    ssh_signing_key: ${{ secrets.SSH_SIGNING_KEY }}
    bot_id: "YOUR_GITHUB_USER_ID"
    bot_name: "YOUR_GITHUB_USERNAME"
```

Commits will show as verified and attributed to the GitHub account that owns the signing key.

**Setup steps:**

1. Generate an SSH key pair for signing:

   ```bash
   ssh-keygen -t ed25519 -f ~/.ssh/signing_key -N "" -C "commit signing key"
   ```

2. Add the **public key** to your GitHub account:

   - Go to GitHub → Settings → SSH and GPG keys
   - Click "New SSH key"
   - Select **Key type: Signing Key** (important)
   - Paste the contents of `~/.ssh/signing_key.pub`

3. Add the **private key** to your repository secrets:

   - Go to your repo → Settings → Secrets and variables → Actions
   - Create a new secret named `SSH_SIGNING_KEY`
   - Paste the contents of `~/.ssh/signing_key`

4. Get your GitHub user ID:

   ```bash
   gh api users/YOUR_USERNAME --jq '.id'
   ```

5. Update your workflow with `bot_id` and `bot_name` matching the account where you added the signing key.

**Note:** If both `ssh_signing_key` and `use_commit_signing` are provided, `ssh_signing_key` takes precedence.
Commits made by Claude through this action are no longer automatically signed with commit signatures. To enable commit signing, set `use_commit_signing: true` in the workflow(s). This ensures the authenticity and integrity of commits, providing a verifiable trail of changes made by the action.

## ⚠️ Authentication Protection
@@ -71,10 +71,9 @@ jobs:
| `branch_prefix` | The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format) | No | `claude/` |
| `settings` | Claude Code settings as JSON string or path to settings JSON file | No | "" |
| `additional_permissions` | Additional permissions to enable. Currently supports 'actions: read' for viewing workflow results | No | "" |
| `use_commit_signing` | Enable commit signing using GitHub's API. Simple but cannot perform complex git operations like rebasing. See [Security](./security.md#commit-signing) | No | `false` |
| `ssh_signing_key` | SSH private key for signing commits. Enables signed commits with full git CLI support (rebasing, etc.). See [Security](./security.md#commit-signing) | No | "" |
| `bot_id` | GitHub user ID to use for git operations (defaults to Claude's bot ID). Required with `ssh_signing_key` for verified commits | No | `41898282` |
| `bot_name` | GitHub username to use for git operations (defaults to Claude's bot name). Required with `ssh_signing_key` for verified commits | No | `claude[bot]` |
| `use_commit_signing` | Enable commit signing using GitHub's commit signature verification. When false, Claude uses standard git commands | No | `false` |
| `bot_id` | GitHub user ID to use for git operations (defaults to Claude's bot ID) | No | `41898282` |
| `bot_name` | GitHub username to use for git operations (defaults to Claude's bot name) | No | `claude[bot]` |
| `allowed_bots` | Comma-separated list of allowed bot usernames, or '\*' to allow all bots. Empty string (default) allows no bots | No | "" |
| `allowed_non_write_users` | **⚠️ RISKY**: Comma-separated list of usernames to allow without write permissions, or '\*' for all users. Only works with `github_token` input. See [Security](./security.md) | No | "" |
| `path_to_claude_code_executable` | Optional path to a custom Claude Code executable. Skips automatic installation. Useful for Nix, custom containers, or specialized environments | No | "" |
@@ -12,7 +12,7 @@
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@actions/github": "^6.0.1",
    "@anthropic-ai/claude-agent-sdk": "^0.2.1",
    "@anthropic-ai/claude-agent-sdk": "^0.1.76",
    "@modelcontextprotocol/sdk": "^1.11.0",
    "@octokit/graphql": "^8.2.2",
    "@octokit/rest": "^21.1.1",
@@ -21,12 +21,8 @@ import type { ParsedGitHubContext } from "../github/context";
import type { CommonFields, PreparedContext, EventData } from "./types";
import { GITHUB_SERVER_URL } from "../github/api/config";
import type { Mode, ModeContext } from "../modes/types";
import { extractUserRequest } from "../utils/extract-user-request";
export type { CommonFields, PreparedContext } from "./types";

/** Filename for the user request file, read by the SDK runner */
const USER_REQUEST_FILENAME = "claude-user-request.txt";

// Tag mode defaults - these tools are needed for tag mode to function
const BASE_ALLOWED_TOOLS = [
  "Edit",
@@ -851,55 +847,6 @@ f. If you are unable to complete certain steps, such as running a linter or test
  return promptContent;
}

/**
 * Extracts the user's request from the prepared context and GitHub data.
 *
 * This is used to send the user's actual command/request as a separate
 * content block, enabling slash command processing in the CLI.
 *
 * @param context - The prepared context containing event data and trigger phrase
 * @param githubData - The fetched GitHub data containing issue/PR body content
 * @returns The extracted user request text (e.g., "/review-pr" or "fix this bug"),
 *          or null for assigned/labeled events without an explicit trigger in the body
 *
 * @example
 * // Comment event: "@claude /review-pr" -> returns "/review-pr"
 * // Issue body with "@claude fix this" -> returns "fix this"
 * // Issue assigned without @claude in body -> returns null
 */
function extractUserRequestFromContext(
  context: PreparedContext,
  githubData: FetchDataResult,
): string | null {
  const { eventData, triggerPhrase } = context;

  // For comment events, extract from comment body
  if (
    "commentBody" in eventData &&
    eventData.commentBody &&
    (eventData.eventName === "issue_comment" ||
      eventData.eventName === "pull_request_review_comment" ||
      eventData.eventName === "pull_request_review")
  ) {
    return extractUserRequest(eventData.commentBody, triggerPhrase);
  }

  // For issue/PR events triggered by body content, extract from the body
  if (githubData.contextData?.body) {
    const request = extractUserRequest(
      githubData.contextData.body,
      triggerPhrase,
    );
    if (request) {
      return request;
    }
  }

  // For assigned/labeled events without explicit trigger in body,
  // return null to indicate the full context should be used
  return null;
}

export async function createPrompt(
  mode: Mode,
  modeContext: ModeContext,
@@ -948,22 +895,6 @@ export async function createPrompt(
    promptContent,
  );

  // Extract and write the user request separately for SDK multi-block messaging
  // This allows the CLI to process slash commands (e.g., "@claude /review-pr")
  const userRequest = extractUserRequestFromContext(
    preparedContext,
    githubData,
  );
  if (userRequest) {
    await writeFile(
      `${process.env.RUNNER_TEMP || "/tmp"}/claude-prompts/${USER_REQUEST_FILENAME}`,
      userRequest,
    );
    console.log("===== USER REQUEST =====");
    console.log(userRequest);
    console.log("========================");
  }

  // Set allowed tools
  const hasActionsReadPermission = false;
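To illustrate the behavior documented in the `@example` comments above, here is a simplified stand-in for `extractUserRequest` (the real helper lives in `src/utils/extract-user-request` and is not shown in this diff, so treat this as an approximation of the documented contract, not the actual implementation):

```typescript
// Simplified stand-in for extractUserRequest: strip the trigger phrase and
// return whatever the user actually asked for, or null when there is no trigger.
function extractUserRequestSketch(body: string, triggerPhrase: string): string | null {
  const index = body.indexOf(triggerPhrase);
  if (index === -1) return null; // no explicit trigger in the body
  const request = body.slice(index + triggerPhrase.length).trim();
  return request.length > 0 ? request : null;
}

console.log(extractUserRequestSketch("@claude /review-pr", "@claude"));           // "/review-pr"
console.log(extractUserRequestSketch("Please @claude fix this bug", "@claude"));  // "fix this bug"
console.log(extractUserRequestSketch("Issue assigned, no trigger here", "@claude")); // null
```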
@@ -1,21 +0,0 @@
#!/usr/bin/env bun

/**
 * Cleanup SSH signing key after action completes
 * This is run as a post step for security purposes
 */

import { cleanupSshSigning } from "../github/operations/git-config";

async function run() {
  try {
    await cleanupSshSigning();
  } catch (error) {
    // Don't fail the action if cleanup fails, just log it
    console.error("Failed to cleanup SSH signing key:", error);
  }
}

if (import.meta.main) {
  run();
}
@@ -26,7 +26,6 @@ export function collectActionInputsPresence(): void {
    max_turns: "",
    use_sticky_comment: "false",
    use_commit_signing: "false",
    ssh_signing_key: "",
  };

  const allInputsJson = process.env.ALL_INPUTS;
@@ -18,11 +18,6 @@ export const PR_QUERY = `
      additions
      deletions
      state
      labels(first: 1) {
        nodes {
          name
        }
      }
      commits(first: 100) {
        totalCount
        nodes {
@@ -106,11 +101,6 @@ export const ISSUE_QUERY = `
      updatedAt
      lastEditedAt
      state
      labels(first: 1) {
        nodes {
          name
        }
      }
      comments(first: 100) {
        nodes {
          id
@@ -6,6 +6,7 @@ import type {
  PullRequestEvent,
  PullRequestReviewEvent,
  PullRequestReviewCommentEvent,
  PushEvent,
  WorkflowRunEvent,
} from "@octokit/webhooks-types";
import { CLAUDE_APP_BOT_ID, CLAUDE_BOT_LOGIN } from "./constants";
@@ -65,6 +66,7 @@ const AUTOMATION_EVENT_NAMES = [
  "repository_dispatch",
  "schedule",
  "workflow_run",
  "push",
] as const;

// Derive types from constants for better maintainability
@@ -88,10 +90,8 @@ type BaseContext = {
  labelTrigger: string;
  baseBranch?: string;
  branchPrefix: string;
  branchNameTemplate?: string;
  useStickyComment: boolean;
  useCommitSigning: boolean;
  sshSigningKey: string;
  botId: string;
  botName: string;
  allowedBots: string;
@@ -114,14 +114,15 @@ export type ParsedGitHubContext = BaseContext & {
  isPR: boolean;
};

// Context for automation events (workflow_dispatch, repository_dispatch, schedule, workflow_run)
// Context for automation events (workflow_dispatch, repository_dispatch, schedule, workflow_run, push)
export type AutomationContext = BaseContext & {
  eventName: AutomationEventName;
  payload:
    | WorkflowDispatchEvent
    | RepositoryDispatchEvent
    | ScheduleEvent
    | WorkflowRunEvent;
    | WorkflowRunEvent
    | PushEvent;
};

// Union type for all contexts
@@ -146,10 +147,8 @@ export function parseGitHubContext(): GitHubContext {
    labelTrigger: process.env.LABEL_TRIGGER ?? "",
    baseBranch: process.env.BASE_BRANCH,
    branchPrefix: process.env.BRANCH_PREFIX ?? "claude/",
    branchNameTemplate: process.env.BRANCH_NAME_TEMPLATE,
    useStickyComment: process.env.USE_STICKY_COMMENT === "true",
    useCommitSigning: process.env.USE_COMMIT_SIGNING === "true",
    sshSigningKey: process.env.SSH_SIGNING_KEY || "",
    botId: process.env.BOT_ID ?? String(CLAUDE_APP_BOT_ID),
    botName: process.env.BOT_NAME ?? CLAUDE_BOT_LOGIN,
    allowedBots: process.env.ALLOWED_BOTS ?? "",
@@ -239,6 +238,13 @@ export function parseGitHubContext(): GitHubContext {
        payload: context.payload as unknown as WorkflowRunEvent,
      };
    }
    case "push": {
      return {
        ...commonFields,
        eventName: "push",
        payload: context.payload as unknown as PushEvent,
      };
    }
    default:
      throw new Error(`Unsupported event type: ${context.eventName}`);
  }
@@ -280,6 +286,12 @@ export function isIssuesAssignedEvent(
  return isIssuesEvent(context) && context.eventAction === "assigned";
}

export function isPushEvent(
  context: GitHubContext,
): context is AutomationContext & { payload: PushEvent } {
  return context.eventName === "push";
}

// Type guard to check if context is an entity context (has entityNumber and isPR)
export function isEntityContext(
  context: GitHubContext,
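A rough usage sketch of the new `push` support: narrow the parsed context with the `isPushEvent` type guard before touching push-specific payload fields. The guard and `parseGitHubContext` are taken from the diff above; the import path is illustrative only.

```typescript
// Illustrative consumer of the push-aware context (import path is an assumption).
import { parseGitHubContext, isPushEvent } from "./context";

const context = parseGitHubContext();

if (isPushEvent(context)) {
  // context.payload is now typed as PushEvent from @octokit/webhooks-types
  console.log(
    `Push to ${context.payload.ref} (${context.payload.commits.length} commits)`,
  );
}
```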
@@ -3,8 +3,6 @@ import type { Octokits } from "../api/client";
|
||||
import { ISSUE_QUERY, PR_QUERY, USER_QUERY } from "../api/queries/github";
|
||||
import {
|
||||
isIssueCommentEvent,
|
||||
isIssuesEvent,
|
||||
isPullRequestEvent,
|
||||
isPullRequestReviewEvent,
|
||||
isPullRequestReviewCommentEvent,
|
||||
type ParsedGitHubContext,
|
||||
@@ -42,31 +40,6 @@ export function extractTriggerTimestamp(
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the original title from the GitHub webhook payload.
|
||||
* This is the title as it existed when the trigger event occurred.
|
||||
*
|
||||
* @param context - Parsed GitHub context from webhook
|
||||
* @returns The original title string or undefined if not available
|
||||
*/
|
||||
export function extractOriginalTitle(
|
||||
context: ParsedGitHubContext,
|
||||
): string | undefined {
|
||||
if (isIssueCommentEvent(context)) {
|
||||
return context.payload.issue?.title;
|
||||
} else if (isPullRequestEvent(context)) {
|
||||
return context.payload.pull_request?.title;
|
||||
} else if (isPullRequestReviewEvent(context)) {
|
||||
return context.payload.pull_request?.title;
|
||||
} else if (isPullRequestReviewCommentEvent(context)) {
|
||||
return context.payload.pull_request?.title;
|
||||
} else if (isIssuesEvent(context)) {
|
||||
return context.payload.issue?.title;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters comments to only include those that existed in their final state before the trigger time.
|
||||
* This prevents malicious actors from editing comments after the trigger to inject harmful content.
|
||||
@@ -173,7 +146,6 @@ type FetchDataParams = {
|
||||
isPR: boolean;
|
||||
triggerUsername?: string;
|
||||
triggerTime?: string;
|
||||
originalTitle?: string;
|
||||
};
|
||||
|
||||
export type GitHubFileWithSHA = GitHubFile & {
|
||||
@@ -197,7 +169,6 @@ export async function fetchGitHubData({
|
||||
isPR,
|
||||
triggerUsername,
|
||||
triggerTime,
|
||||
originalTitle,
|
||||
}: FetchDataParams): Promise<FetchDataResult> {
|
||||
const [owner, repo] = repository.split("/");
|
||||
if (!owner || !repo) {
|
||||
@@ -383,11 +354,6 @@ export async function fetchGitHubData({
|
||||
triggerDisplayName = await fetchUserDisplayName(octokits, triggerUsername);
|
||||
}
|
||||
|
||||
// Use the original title from the webhook payload if provided
|
||||
if (originalTitle !== undefined) {
|
||||
contextData.title = originalTitle;
|
||||
}
|
||||
|
||||
return {
|
||||
contextData,
|
||||
comments,
|
||||
|
||||
@@ -14,8 +14,7 @@ export function formatContext(
): string {
  if (isPR) {
    const prData = contextData as GitHubPullRequest;
    const sanitizedTitle = sanitizeContent(prData.title);
    return `PR Title: ${sanitizedTitle}
    return `PR Title: ${prData.title}
PR Author: ${prData.author.login}
PR Branch: ${prData.headRefName} -> ${prData.baseRefName}
PR State: ${prData.state}
@@ -25,8 +24,7 @@ Total Commits: ${prData.commits.totalCount}
Changed Files: ${prData.files.nodes.length} files`;
  } else {
    const issueData = contextData as GitHubIssue;
    const sanitizedTitle = sanitizeContent(issueData.title);
    return `Issue Title: ${sanitizedTitle}
    return `Issue Title: ${issueData.title}
Issue Author: ${issueData.author.login}
Issue State: ${issueData.state}`;
  }
@@ -6,22 +6,12 @@
|
||||
* - For Issues: Create a new branch
|
||||
*/
|
||||
|
||||
import { $ } from "bun";
|
||||
import { execFileSync } from "child_process";
|
||||
import * as core from "@actions/core";
|
||||
import type { ParsedGitHubContext } from "../context";
|
||||
import type { GitHubPullRequest } from "../types";
|
||||
import type { Octokits } from "../api/client";
|
||||
import type { FetchDataResult } from "../data/fetcher";
|
||||
import { generateBranchName } from "../../utils/branch-template";
|
||||
|
||||
/**
|
||||
* Extracts the first label from GitHub data, or returns undefined if no labels exist
|
||||
*/
|
||||
function extractFirstLabel(githubData: FetchDataResult): string | undefined {
|
||||
const labels = githubData.contextData.labels?.nodes;
|
||||
return labels && labels.length > 0 ? labels[0]?.name : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a git branch name against a strict whitelist pattern.
|
||||
@@ -135,7 +125,7 @@ export async function setupBranch(
|
||||
): Promise<BranchInfo> {
|
||||
const { owner, repo } = context.repository;
|
||||
const entityNumber = context.entityNumber;
|
||||
const { baseBranch, branchPrefix, branchNameTemplate } = context.inputs;
|
||||
const { baseBranch, branchPrefix } = context.inputs;
|
||||
const isPR = context.isPR;
|
||||
|
||||
if (isPR) {
|
||||
@@ -201,8 +191,17 @@ export async function setupBranch(
|
||||
// Generate branch name for either an issue or closed/merged PR
|
||||
const entityType = isPR ? "pr" : "issue";
|
||||
|
||||
// Get the SHA of the source branch to use in template
|
||||
let sourceSHA: string | undefined;
|
||||
// Create Kubernetes-compatible timestamp: lowercase, hyphens only, shorter format
|
||||
const now = new Date();
|
||||
const timestamp = `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`;
|
||||
|
||||
// Ensure branch name is Kubernetes-compatible:
|
||||
// - Lowercase only
|
||||
// - Alphanumeric with hyphens
|
||||
// - No underscores
|
||||
// - Max 50 chars (to allow for prefixes)
|
||||
const branchName = `${branchPrefix}${entityType}-${entityNumber}-${timestamp}`;
|
||||
const newBranch = branchName.toLowerCase().substring(0, 50);
|
||||
|
||||
try {
|
||||
// Get the SHA of the source branch to verify it exists
|
||||
@@ -212,46 +211,8 @@ export async function setupBranch(
|
||||
ref: `heads/${sourceBranch}`,
|
||||
});
|
||||
|
||||
sourceSHA = sourceBranchRef.data.object.sha;
|
||||
console.log(`Source branch SHA: ${sourceSHA}`);
|
||||
|
||||
// Extract first label from GitHub data
|
||||
const firstLabel = extractFirstLabel(githubData);
|
||||
|
||||
// Extract title from GitHub data
|
||||
const title = githubData.contextData.title;
|
||||
|
||||
// Generate branch name using template or default format
|
||||
let newBranch = generateBranchName(
|
||||
branchNameTemplate,
|
||||
branchPrefix,
|
||||
entityType,
|
||||
entityNumber,
|
||||
sourceSHA,
|
||||
firstLabel,
|
||||
title,
|
||||
);
|
||||
|
||||
// Check if generated branch already exists on remote
|
||||
try {
|
||||
await $`git ls-remote --exit-code origin refs/heads/${newBranch}`.quiet();
|
||||
|
||||
// If we get here, branch exists (exit code 0)
|
||||
console.log(
|
||||
`Branch '${newBranch}' already exists, falling back to default format`,
|
||||
);
|
||||
newBranch = generateBranchName(
|
||||
undefined, // Force default template
|
||||
branchPrefix,
|
||||
entityType,
|
||||
entityNumber,
|
||||
sourceSHA,
|
||||
firstLabel,
|
||||
title,
|
||||
);
|
||||
} catch {
|
||||
// Branch doesn't exist (non-zero exit code), continue with generated name
|
||||
}
|
||||
const currentSHA = sourceBranchRef.data.object.sha;
|
||||
console.log(`Source branch SHA: ${currentSHA}`);
|
||||
|
||||
// For commit signing, defer branch creation to the file ops server
|
||||
if (context.inputs.useCommitSigning) {
|
||||
|
||||
@@ -6,14 +6,9 @@
|
||||
*/
|
||||
|
||||
import { $ } from "bun";
|
||||
import { mkdir, writeFile, rm } from "fs/promises";
|
||||
import { join } from "path";
|
||||
import { homedir } from "os";
|
||||
import type { GitHubContext } from "../context";
|
||||
import { GITHUB_SERVER_URL } from "../api/config";
|
||||
|
||||
const SSH_SIGNING_KEY_PATH = join(homedir(), ".ssh", "claude_signing_key");
|
||||
|
||||
type GitUser = {
|
||||
login: string;
|
||||
id: number;
|
||||
@@ -59,50 +54,3 @@ export async function configureGitAuth(
|
||||
|
||||
console.log("Git authentication configured successfully");
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure git to use SSH signing for commits
|
||||
* This is an alternative to GitHub API-based commit signing (use_commit_signing)
|
||||
*/
|
||||
export async function setupSshSigning(sshSigningKey: string): Promise<void> {
|
||||
console.log("Configuring SSH signing for commits...");
|
||||
|
||||
// Validate SSH key format
|
||||
if (!sshSigningKey.trim()) {
|
||||
throw new Error("SSH signing key cannot be empty");
|
||||
}
|
||||
if (
|
||||
!sshSigningKey.includes("BEGIN") ||
|
||||
!sshSigningKey.includes("PRIVATE KEY")
|
||||
) {
|
||||
throw new Error("Invalid SSH private key format");
|
||||
}
|
||||
|
||||
// Create .ssh directory with secure permissions (700)
|
||||
const sshDir = join(homedir(), ".ssh");
|
||||
await mkdir(sshDir, { recursive: true, mode: 0o700 });
|
||||
|
||||
// Write the signing key atomically with secure permissions (600)
|
||||
await writeFile(SSH_SIGNING_KEY_PATH, sshSigningKey, { mode: 0o600 });
|
||||
console.log(`✓ SSH signing key written to ${SSH_SIGNING_KEY_PATH}`);
|
||||
|
||||
// Configure git to use SSH signing
|
||||
await $`git config gpg.format ssh`;
|
||||
await $`git config user.signingkey ${SSH_SIGNING_KEY_PATH}`;
|
||||
await $`git config commit.gpgsign true`;
|
||||
|
||||
console.log("✓ Git configured to use SSH signing for commits");
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up the SSH signing key file
|
||||
* Should be called in the post step for security
|
||||
*/
|
||||
export async function cleanupSshSigning(): Promise<void> {
|
||||
try {
|
||||
await rm(SSH_SIGNING_KEY_PATH, { force: true });
|
||||
console.log("✓ SSH signing key cleaned up");
|
||||
} catch (error) {
|
||||
console.log("No SSH signing key to clean up");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,11 +63,6 @@ export type GitHubPullRequest = {
|
||||
additions: number;
|
||||
deletions: number;
|
||||
state: string;
|
||||
labels: {
|
||||
nodes: Array<{
|
||||
name: string;
|
||||
}>;
|
||||
};
|
||||
commits: {
|
||||
totalCount: number;
|
||||
nodes: Array<{
|
||||
@@ -93,11 +88,6 @@ export type GitHubIssue = {
|
||||
updatedAt?: string;
|
||||
lastEditedAt?: string;
|
||||
state: string;
|
||||
labels: {
|
||||
nodes: Array<{
|
||||
name: string;
|
||||
}>;
|
||||
};
|
||||
comments: {
|
||||
nodes: GitHubComment[];
|
||||
};
|
||||
|
||||
@@ -4,12 +4,11 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import { z } from "zod";
|
||||
import { readFile, stat } from "fs/promises";
|
||||
import { resolve } from "path";
|
||||
import { join } from "path";
|
||||
import { constants } from "fs";
|
||||
import fetch from "node-fetch";
|
||||
import { GITHUB_API_URL } from "../github/api/config";
|
||||
import { retryWithBackoff } from "../utils/retry";
|
||||
import { validatePathWithinRepo } from "./path-validation";
|
||||
|
||||
type GitHubRef = {
|
||||
object: {
|
||||
@@ -214,18 +213,12 @@ server.tool(
|
||||
throw new Error("GITHUB_TOKEN environment variable is required");
|
||||
}
|
||||
|
||||
// Validate all paths are within repository root and get full/relative paths
|
||||
const resolvedRepoDir = resolve(REPO_DIR);
|
||||
const validatedFiles = await Promise.all(
|
||||
files.map(async (filePath) => {
|
||||
const fullPath = await validatePathWithinRepo(filePath, REPO_DIR);
|
||||
// Calculate the relative path for the git tree entry
|
||||
// Use the original filePath (normalized) for the git path, not the symlink-resolved path
|
||||
const normalizedPath = resolve(resolvedRepoDir, filePath);
|
||||
const relativePath = normalizedPath.slice(resolvedRepoDir.length + 1);
|
||||
return { fullPath, relativePath };
|
||||
}),
|
||||
);
|
||||
const processedFiles = files.map((filePath) => {
|
||||
if (filePath.startsWith("/")) {
|
||||
return filePath.slice(1);
|
||||
}
|
||||
return filePath;
|
||||
});
|
||||
|
||||
// 1. Get the branch reference (create if doesn't exist)
|
||||
const baseSha = await getOrCreateBranchRef(
|
||||
@@ -254,14 +247,18 @@ server.tool(
|
||||
|
||||
// 3. Create tree entries for all files
|
||||
const treeEntries = await Promise.all(
|
||||
validatedFiles.map(async ({ fullPath, relativePath }) => {
|
||||
processedFiles.map(async (filePath) => {
|
||||
const fullPath = filePath.startsWith("/")
|
||||
? filePath
|
||||
: join(REPO_DIR, filePath);
|
||||
|
||||
// Get the proper file mode based on file permissions
|
||||
const fileMode = await getFileMode(fullPath);
|
||||
|
||||
// Check if file is binary (images, etc.)
|
||||
const isBinaryFile =
|
||||
/\.(png|jpg|jpeg|gif|webp|ico|pdf|zip|tar|gz|exe|bin|woff|woff2|ttf|eot)$/i.test(
|
||||
relativePath,
|
||||
filePath,
|
||||
);
|
||||
|
||||
if (isBinaryFile) {
|
||||
@@ -287,7 +284,7 @@ server.tool(
|
||||
if (!blobResponse.ok) {
|
||||
const errorText = await blobResponse.text();
|
||||
throw new Error(
|
||||
`Failed to create blob for ${relativePath}: ${blobResponse.status} - ${errorText}`,
|
||||
`Failed to create blob for ${filePath}: ${blobResponse.status} - ${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -295,7 +292,7 @@ server.tool(
|
||||
|
||||
// Return tree entry with blob SHA
|
||||
return {
|
||||
path: relativePath,
|
||||
path: filePath,
|
||||
mode: fileMode,
|
||||
type: "blob",
|
||||
sha: blobData.sha,
|
||||
@@ -304,7 +301,7 @@ server.tool(
|
||||
// For text files, include content directly in tree
|
||||
const content = await readFile(fullPath, "utf-8");
|
||||
return {
|
||||
path: relativePath,
|
||||
path: filePath,
|
||||
mode: fileMode,
|
||||
type: "blob",
|
||||
content: content,
|
||||
@@ -424,9 +421,7 @@ server.tool(
|
||||
author: newCommitData.author.name,
|
||||
date: newCommitData.author.date,
|
||||
},
|
||||
files: validatedFiles.map(({ relativePath }) => ({
|
||||
path: relativePath,
|
||||
})),
|
||||
files: processedFiles.map((path) => ({ path })),
|
||||
tree: {
|
||||
sha: treeData.sha,
|
||||
},
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
import { realpath } from "fs/promises";
|
||||
import { resolve, sep } from "path";
|
||||
|
||||
/**
|
||||
* Validates that a file path resolves within the repository root.
|
||||
* Prevents path traversal attacks via "../" sequences and symlinks.
|
||||
* @param filePath - The file path to validate (can be relative or absolute)
|
||||
* @param repoRoot - The repository root directory
|
||||
* @returns The resolved absolute path (with symlinks resolved) if valid
|
||||
* @throws Error if the path resolves outside the repository root
|
||||
*/
|
||||
export async function validatePathWithinRepo(
|
||||
filePath: string,
|
||||
repoRoot: string,
|
||||
): Promise<string> {
|
||||
// First resolve the path string (handles .. and . segments)
|
||||
const initialPath = resolve(repoRoot, filePath);
|
||||
|
||||
// Resolve symlinks to get the real path
|
||||
// This prevents symlink attacks where a link inside the repo points outside
|
||||
let resolvedRoot: string;
|
||||
let resolvedPath: string;
|
||||
|
||||
try {
|
||||
resolvedRoot = await realpath(repoRoot);
|
||||
} catch {
|
||||
throw new Error(`Repository root '${repoRoot}' does not exist`);
|
||||
}
|
||||
|
||||
try {
|
||||
resolvedPath = await realpath(initialPath);
|
||||
} catch {
|
||||
// File doesn't exist yet - fall back to checking the parent directory
|
||||
// This handles the case where we're creating a new file
|
||||
const parentDir = resolve(initialPath, "..");
|
||||
try {
|
||||
const resolvedParent = await realpath(parentDir);
|
||||
if (
|
||||
resolvedParent !== resolvedRoot &&
|
||||
!resolvedParent.startsWith(resolvedRoot + sep)
|
||||
) {
|
||||
throw new Error(
|
||||
`Path '${filePath}' resolves outside the repository root`,
|
||||
);
|
||||
}
|
||||
// Parent is valid, return the initial path since file doesn't exist yet
|
||||
return initialPath;
|
||||
} catch {
|
||||
throw new Error(
|
||||
`Path '${filePath}' resolves outside the repository root`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Path must be within repo root (or be the root itself)
|
||||
if (
|
||||
resolvedPath !== resolvedRoot &&
|
||||
!resolvedPath.startsWith(resolvedRoot + sep)
|
||||
) {
|
||||
throw new Error(`Path '${filePath}' resolves outside the repository root`);
|
||||
}
|
||||
|
||||
return resolvedPath;
|
||||
}
|
||||
@@ -4,10 +4,7 @@ import type { Mode, ModeOptions, ModeResult } from "../types";
|
||||
import type { PreparedContext } from "../../create-prompt/types";
|
||||
import { prepareMcpConfig } from "../../mcp/install-mcp-server";
|
||||
import { parseAllowedTools } from "./parse-tools";
|
||||
import {
|
||||
configureGitAuth,
|
||||
setupSshSigning,
|
||||
} from "../../github/operations/git-config";
|
||||
import { configureGitAuth } from "../../github/operations/git-config";
|
||||
import type { GitHubContext } from "../../github/context";
|
||||
import { isEntityContext } from "../../github/context";
|
||||
|
||||
@@ -82,27 +79,7 @@ export const agentMode: Mode = {
|
||||
|
||||
async prepare({ context, githubToken }: ModeOptions): Promise<ModeResult> {
|
||||
// Configure git authentication for agent mode (same as tag mode)
|
||||
// SSH signing takes precedence if provided
|
||||
const useSshSigning = !!context.inputs.sshSigningKey;
|
||||
const useApiCommitSigning =
|
||||
context.inputs.useCommitSigning && !useSshSigning;
|
||||
|
||||
if (useSshSigning) {
|
||||
// Setup SSH signing for commits
|
||||
await setupSshSigning(context.inputs.sshSigningKey);
|
||||
|
||||
// Still configure git auth for push operations (user/email and remote URL)
|
||||
const user = {
|
||||
login: context.inputs.botName,
|
||||
id: parseInt(context.inputs.botId),
|
||||
};
|
||||
try {
|
||||
await configureGitAuth(githubToken, context, user);
|
||||
} catch (error) {
|
||||
console.error("Failed to configure git authentication:", error);
|
||||
// Continue anyway - git operations may still work with default config
|
||||
}
|
||||
} else if (!useApiCommitSigning) {
|
||||
if (!context.inputs.useCommitSigning) {
|
||||
// Use bot_id and bot_name from inputs directly
|
||||
const user = {
|
||||
login: context.inputs.botName,
|
||||
|
||||
@@ -4,15 +4,11 @@ import { checkContainsTrigger } from "../../github/validation/trigger";
|
||||
import { checkHumanActor } from "../../github/validation/actor";
|
||||
import { createInitialComment } from "../../github/operations/comments/create-initial";
|
||||
import { setupBranch } from "../../github/operations/branch";
|
||||
import {
|
||||
configureGitAuth,
|
||||
setupSshSigning,
|
||||
} from "../../github/operations/git-config";
|
||||
import { configureGitAuth } from "../../github/operations/git-config";
|
||||
import { prepareMcpConfig } from "../../mcp/install-mcp-server";
|
||||
import {
|
||||
fetchGitHubData,
|
||||
extractTriggerTimestamp,
|
||||
extractOriginalTitle,
|
||||
} from "../../github/data/fetcher";
|
||||
import { createPrompt, generateDefaultPrompt } from "../../create-prompt";
|
||||
import { isEntityContext } from "../../github/context";
|
||||
@@ -79,7 +75,6 @@ export const tagMode: Mode = {
|
||||
const commentId = commentData.id;
|
||||
|
||||
const triggerTime = extractTriggerTimestamp(context);
|
||||
const originalTitle = extractOriginalTitle(context);
|
||||
|
||||
const githubData = await fetchGitHubData({
octokits: octokit,
@@ -88,34 +83,13 @@ export const tagMode: Mode = {
isPR: context.isPR,
triggerUsername: context.actor,
triggerTime,
originalTitle,
});

// Setup branch
const branchInfo = await setupBranch(octokit, githubData, context);

// Configure git authentication
// SSH signing takes precedence if provided
const useSshSigning = !!context.inputs.sshSigningKey;
const useApiCommitSigning =
context.inputs.useCommitSigning && !useSshSigning;

if (useSshSigning) {
// Setup SSH signing for commits
await setupSshSigning(context.inputs.sshSigningKey);

// Still configure git auth for push operations (user/email and remote URL)
const user = {
login: context.inputs.botName,
id: parseInt(context.inputs.botId),
};
try {
await configureGitAuth(githubToken, context, user);
} catch (error) {
console.error("Failed to configure git authentication:", error);
throw error;
}
} else if (!useApiCommitSigning) {
// Configure git authentication if not using commit signing
if (!context.inputs.useCommitSigning) {
// Use bot_id and bot_name from inputs directly
const user = {
login: context.inputs.botName,
@@ -161,9 +135,8 @@ export const tagMode: Mode = {
...userAllowedMCPTools,
];

// Add git commands when using git CLI (no API commit signing, or SSH signing)
// SSH signing still uses git CLI, just with signing enabled
if (!useApiCommitSigning) {
// Add git commands when not using commit signing
if (!context.inputs.useCommitSigning) {
tagModeTools.push(
"Bash(git add:*)",
"Bash(git commit:*)",
@@ -174,7 +147,7 @@ export const tagMode: Mode = {
"Bash(git rm:*)",
);
} else {
// When using API commit signing, use MCP file ops tools
// When using commit signing, use MCP file ops tools
tagModeTools.push(
"mcp__github_file_ops__commit_files",
"mcp__github_file_ops__delete_files",
@@ -1,99 +0,0 @@
#!/usr/bin/env bun

/**
* Branch name template parsing and variable substitution utilities
*/

const NUM_DESCRIPTION_WORDS = 5;

/**
* Extracts the first 5 words from a title and converts them to kebab-case
*/
function extractDescription(
title: string,
numWords: number = NUM_DESCRIPTION_WORDS,
): string {
if (!title || title.trim() === "") {
return "";
}

return title
.trim()
.split(/\s+/)
.slice(0, numWords) // Only first `numWords` words
.join("-")
.toLowerCase()
.replace(/[^a-z0-9-]/g, "") // Remove non-alphanumeric except hyphens
.replace(/-+/g, "-") // Replace multiple hyphens with single
.replace(/^-|-$/g, ""); // Remove leading/trailing hyphens
}

export interface BranchTemplateVariables {
prefix: string;
entityType: string;
entityNumber: number;
timestamp: string;
sha?: string;
label?: string;
description?: string;
}

/**
* Replaces template variables in a branch name template
* Template format: {{variableName}}
*/
export function applyBranchTemplate(
template: string,
variables: BranchTemplateVariables,
): string {
let result = template;

// Replace each variable
Object.entries(variables).forEach(([key, value]) => {
const placeholder = `{{${key}}}`;
const replacement = value ? String(value) : "";
result = result.replaceAll(placeholder, replacement);
});

return result;
}

/**
* Generates a branch name from the provided `template` and set of `variables`. Uses a default format if the template is empty or produces an empty result.
*/
export function generateBranchName(
template: string | undefined,
branchPrefix: string,
entityType: string,
entityNumber: number,
sha?: string,
label?: string,
title?: string,
): string {
const now = new Date();

const variables: BranchTemplateVariables = {
prefix: branchPrefix,
entityType,
entityNumber,
timestamp: `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`,
sha: sha?.substring(0, 8), // First 8 characters of SHA
label: label || entityType, // Fall back to entityType if no label
description: title ? extractDescription(title) : undefined,
};

if (template?.trim()) {
const branchName = applyBranchTemplate(template, variables);

// Some templates could produce empty results- validate
if (branchName.trim().length > 0) return branchName;

console.log(
`Branch template '${template}' generated empty result, falling back to default format`,
);
}

const branchName = `${branchPrefix}${entityType}-${entityNumber}-${variables.timestamp}`;
// Kubernetes compatible: lowercase, max 50 chars, alphanumeric and hyphens only
return branchName.toLowerCase().substring(0, 50);
}
@@ -1,32 +0,0 @@
/**
* Extracts the user's request from a trigger comment.
*
* Given a comment like "@claude /review-pr please check the auth module",
* this extracts "/review-pr please check the auth module".
*
* @param commentBody - The full comment body containing the trigger phrase
* @param triggerPhrase - The trigger phrase (e.g., "@claude")
* @returns The user's request (text after the trigger phrase), or null if not found
*/
export function extractUserRequest(
commentBody: string | undefined,
triggerPhrase: string,
): string | null {
if (!commentBody) {
return null;
}

// Use string operations instead of regex for better performance and security
// (avoids potential ReDoS with large comment bodies)
const triggerIndex = commentBody
.toLowerCase()
.indexOf(triggerPhrase.toLowerCase());
if (triggerIndex === -1) {
return null;
}

const afterTrigger = commentBody
.substring(triggerIndex + triggerPhrase.length)
.trim();
return afterTrigger || null;
}
@@ -1,247 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import {
|
||||
applyBranchTemplate,
|
||||
generateBranchName,
|
||||
} from "../src/utils/branch-template";
|
||||
|
||||
describe("branch template utilities", () => {
|
||||
describe("applyBranchTemplate", () => {
|
||||
it("should replace all template variables", () => {
|
||||
const template =
|
||||
"{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}";
|
||||
const variables = {
|
||||
prefix: "feat/",
|
||||
entityType: "issue",
|
||||
entityNumber: 123,
|
||||
timestamp: "20240301-1430",
|
||||
sha: "abcd1234",
|
||||
};
|
||||
|
||||
const result = applyBranchTemplate(template, variables);
|
||||
expect(result).toBe("feat/issue-123-20240301-1430");
|
||||
});
|
||||
|
||||
it("should handle custom templates with multiple variables", () => {
|
||||
const template =
|
||||
"{{prefix}}fix/{{entityType}}_{{entityNumber}}_{{timestamp}}_{{sha}}";
|
||||
const variables = {
|
||||
prefix: "claude-",
|
||||
entityType: "pr",
|
||||
entityNumber: 456,
|
||||
timestamp: "20240301-1430",
|
||||
sha: "abcd1234",
|
||||
};
|
||||
|
||||
const result = applyBranchTemplate(template, variables);
|
||||
expect(result).toBe("claude-fix/pr_456_20240301-1430_abcd1234");
|
||||
});
|
||||
|
||||
it("should handle templates with missing variables gracefully", () => {
|
||||
const template = "{{prefix}}{{entityType}}-{{missing}}-{{entityNumber}}";
|
||||
const variables = {
|
||||
prefix: "feat/",
|
||||
entityType: "issue",
|
||||
entityNumber: 123,
|
||||
timestamp: "20240301-1430",
|
||||
};
|
||||
|
||||
const result = applyBranchTemplate(template, variables);
|
||||
expect(result).toBe("feat/issue-{{missing}}-123");
|
||||
});
|
||||
});
|
||||
|
||||
describe("generateBranchName", () => {
|
||||
it("should use custom template when provided", () => {
|
||||
const template = "{{prefix}}custom-{{entityType}}_{{entityNumber}}";
|
||||
const result = generateBranchName(template, "feature/", "issue", 123);
|
||||
|
||||
expect(result).toBe("feature/custom-issue_123");
|
||||
});
|
||||
|
||||
it("should use default format when template is empty", () => {
|
||||
const result = generateBranchName("", "claude/", "issue", 123);
|
||||
|
||||
expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
|
||||
});
|
||||
|
||||
it("should use default format when template is undefined", () => {
|
||||
const result = generateBranchName(undefined, "claude/", "pr", 456);
|
||||
|
||||
expect(result).toMatch(/^claude\/pr-456-\d{8}-\d{4}$/);
|
||||
});
|
||||
|
||||
it("should preserve custom template formatting (no automatic lowercase/truncation)", () => {
|
||||
const template = "{{prefix}}UPPERCASE_Branch-Name_{{entityNumber}}";
|
||||
const result = generateBranchName(template, "Feature/", "issue", 123);
|
||||
|
||||
expect(result).toBe("Feature/UPPERCASE_Branch-Name_123");
|
||||
});
|
||||
|
||||
it("should not truncate custom template results", () => {
|
||||
const template =
|
||||
"{{prefix}}very-long-branch-name-that-exceeds-the-maximum-allowed-length-{{entityNumber}}";
|
||||
const result = generateBranchName(template, "feature/", "issue", 123);
|
||||
|
||||
expect(result).toBe(
|
||||
"feature/very-long-branch-name-that-exceeds-the-maximum-allowed-length-123",
|
||||
);
|
||||
});
|
||||
|
||||
it("should apply Kubernetes-compatible transformations to default template only", () => {
|
||||
const result = generateBranchName(undefined, "Feature/", "issue", 123);
|
||||
|
||||
expect(result).toMatch(/^feature\/issue-123-\d{8}-\d{4}$/);
|
||||
expect(result.length).toBeLessThanOrEqual(50);
|
||||
});
|
||||
|
||||
it("should handle SHA in template", () => {
|
||||
const template = "{{prefix}}{{entityType}}-{{entityNumber}}-{{sha}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"fix/",
|
||||
"pr",
|
||||
789,
|
||||
"abcdef123456",
|
||||
);
|
||||
|
||||
expect(result).toBe("fix/pr-789-abcdef12");
|
||||
});
|
||||
|
||||
it("should use label in template when provided", () => {
|
||||
const template = "{{prefix}}{{label}}/{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"feature/",
|
||||
"issue",
|
||||
123,
|
||||
undefined,
|
||||
"bug",
|
||||
);
|
||||
|
||||
expect(result).toBe("feature/bug/123");
|
||||
});
|
||||
|
||||
it("should fallback to entityType when label template is used but no label provided", () => {
|
||||
const template = "{{prefix}}{{label}}-{{entityNumber}}";
|
||||
const result = generateBranchName(template, "fix/", "pr", 456);
|
||||
|
||||
expect(result).toBe("fix/pr-456");
|
||||
});
|
||||
|
||||
it("should handle template with both label and entityType", () => {
|
||||
const template = "{{prefix}}{{label}}-{{entityType}}_{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"dev/",
|
||||
"issue",
|
||||
789,
|
||||
undefined,
|
||||
"enhancement",
|
||||
);
|
||||
|
||||
expect(result).toBe("dev/enhancement-issue_789");
|
||||
});
|
||||
|
||||
it("should use description in template when provided", () => {
|
||||
const template = "{{prefix}}{{description}}/{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"feature/",
|
||||
"issue",
|
||||
123,
|
||||
undefined,
|
||||
undefined,
|
||||
"Fix login bug with OAuth",
|
||||
);
|
||||
|
||||
expect(result).toBe("feature/fix-login-bug-with-oauth/123");
|
||||
});
|
||||
|
||||
it("should handle template with multiple variables including description", () => {
|
||||
const template =
|
||||
"{{prefix}}{{label}}/{{description}}-{{entityType}}_{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"dev/",
|
||||
"issue",
|
||||
456,
|
||||
undefined,
|
||||
"bug",
|
||||
"User authentication fails completely",
|
||||
);
|
||||
|
||||
expect(result).toBe(
|
||||
"dev/bug/user-authentication-fails-completely-issue_456",
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle description with special characters in template", () => {
|
||||
const template = "{{prefix}}{{description}}-{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"fix/",
|
||||
"pr",
|
||||
789,
|
||||
undefined,
|
||||
undefined,
|
||||
"Add: User Registration & Email Validation",
|
||||
);
|
||||
|
||||
expect(result).toBe("fix/add-user-registration-email-789");
|
||||
});
|
||||
|
||||
it("should truncate descriptions to exactly 5 words", () => {
|
||||
const result = generateBranchName(
|
||||
"{{prefix}}{{description}}/{{entityNumber}}",
|
||||
"feature/",
|
||||
"issue",
|
||||
999,
|
||||
undefined,
|
||||
undefined,
|
||||
"This is a very long title with many more than five words in it",
|
||||
);
|
||||
expect(result).toBe("feature/this-is-a-very-long/999");
|
||||
});
|
||||
|
||||
it("should handle empty description in template", () => {
|
||||
const template = "{{prefix}}{{description}}-{{entityNumber}}";
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"test/",
|
||||
"issue",
|
||||
101,
|
||||
undefined,
|
||||
undefined,
|
||||
"",
|
||||
);
|
||||
|
||||
expect(result).toBe("test/-101");
|
||||
});
|
||||
|
||||
it("should fallback to default format when template produces empty result", () => {
|
||||
const template = "{{description}}"; // Will be empty if no title provided
|
||||
const result = generateBranchName(template, "claude/", "issue", 123);
|
||||
|
||||
expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
|
||||
expect(result.length).toBeLessThanOrEqual(50);
|
||||
});
|
||||
|
||||
it("should fallback to default format when template produces only whitespace", () => {
|
||||
const template = " {{description}} "; // Will be " " if description is empty
|
||||
const result = generateBranchName(
|
||||
template,
|
||||
"fix/",
|
||||
"pr",
|
||||
456,
|
||||
undefined,
|
||||
undefined,
|
||||
"",
|
||||
);
|
||||
|
||||
expect(result).toMatch(/^fix\/pr-456-\d{8}-\d{4}$/);
|
||||
expect(result.length).toBeLessThanOrEqual(50);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -61,7 +61,6 @@ describe("generatePrompt", () => {
body: "This is a test PR",
author: { login: "testuser" },
state: "OPEN",
labels: { nodes: [] },
createdAt: "2023-01-01T00:00:00Z",
additions: 15,
deletions: 5,
@@ -476,7 +475,6 @@ describe("generatePrompt", () => {
body: "The login form is not working",
author: { login: "testuser" },
state: "OPEN",
labels: { nodes: [] },
createdAt: "2023-01-01T00:00:00Z",
comments: {
nodes: [],
@@ -1,7 +1,6 @@
|
||||
import { describe, expect, it, jest } from "bun:test";
|
||||
import {
|
||||
extractTriggerTimestamp,
|
||||
extractOriginalTitle,
|
||||
fetchGitHubData,
|
||||
filterCommentsToTriggerTime,
|
||||
filterReviewsToTriggerTime,
|
||||
@@ -10,7 +9,6 @@ import {
|
||||
import {
|
||||
createMockContext,
|
||||
mockIssueCommentContext,
|
||||
mockPullRequestCommentContext,
|
||||
mockPullRequestReviewContext,
|
||||
mockPullRequestReviewCommentContext,
|
||||
mockPullRequestOpenedContext,
|
||||
@@ -65,47 +63,6 @@ describe("extractTriggerTimestamp", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractOriginalTitle", () => {
|
||||
it("should extract title from IssueCommentEvent on PR", () => {
|
||||
const title = extractOriginalTitle(mockPullRequestCommentContext);
|
||||
expect(title).toBe("Fix: Memory leak in user service");
|
||||
});
|
||||
|
||||
it("should extract title from PullRequestReviewEvent", () => {
|
||||
const title = extractOriginalTitle(mockPullRequestReviewContext);
|
||||
expect(title).toBe("Refactor: Improve error handling in API layer");
|
||||
});
|
||||
|
||||
it("should extract title from PullRequestReviewCommentEvent", () => {
|
||||
const title = extractOriginalTitle(mockPullRequestReviewCommentContext);
|
||||
expect(title).toBe("Performance: Optimize search algorithm");
|
||||
});
|
||||
|
||||
it("should extract title from pull_request event", () => {
|
||||
const title = extractOriginalTitle(mockPullRequestOpenedContext);
|
||||
expect(title).toBe("Feature: Add user authentication");
|
||||
});
|
||||
|
||||
it("should extract title from issues event", () => {
|
||||
const title = extractOriginalTitle(mockIssueOpenedContext);
|
||||
expect(title).toBe("Bug: Application crashes on startup");
|
||||
});
|
||||
|
||||
it("should return undefined for event without title", () => {
|
||||
const context = createMockContext({
|
||||
eventName: "issue_comment",
|
||||
payload: {
|
||||
comment: {
|
||||
id: 123,
|
||||
body: "test",
|
||||
},
|
||||
} as any,
|
||||
});
|
||||
const title = extractOriginalTitle(context);
|
||||
expect(title).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("filterCommentsToTriggerTime", () => {
|
||||
const createMockComment = (
|
||||
createdAt: string,
|
||||
@@ -988,115 +945,4 @@ describe("fetchGitHubData integration with time filtering", () => {
|
||||
);
|
||||
expect(hasPrBodyInMap).toBe(false);
|
||||
});
|
||||
|
||||
it("should use originalTitle when provided instead of fetched title", async () => {
|
||||
const mockOctokits = {
|
||||
graphql: jest.fn().mockResolvedValue({
|
||||
repository: {
|
||||
pullRequest: {
|
||||
number: 123,
|
||||
title: "Fetched Title From GraphQL",
|
||||
body: "PR body",
|
||||
author: { login: "author" },
|
||||
createdAt: "2024-01-15T10:00:00Z",
|
||||
additions: 10,
|
||||
deletions: 5,
|
||||
state: "OPEN",
|
||||
commits: { totalCount: 1, nodes: [] },
|
||||
files: { nodes: [] },
|
||||
comments: { nodes: [] },
|
||||
reviews: { nodes: [] },
|
||||
},
|
||||
},
|
||||
user: { login: "trigger-user" },
|
||||
}),
|
||||
rest: jest.fn() as any,
|
||||
};
|
||||
|
||||
const result = await fetchGitHubData({
|
||||
octokits: mockOctokits as any,
|
||||
repository: "test-owner/test-repo",
|
||||
prNumber: "123",
|
||||
isPR: true,
|
||||
triggerUsername: "trigger-user",
|
||||
originalTitle: "Original Title From Webhook",
|
||||
});
|
||||
|
||||
expect(result.contextData.title).toBe("Original Title From Webhook");
|
||||
});
|
||||
|
||||
it("should use fetched title when originalTitle is not provided", async () => {
|
||||
const mockOctokits = {
|
||||
graphql: jest.fn().mockResolvedValue({
|
||||
repository: {
|
||||
pullRequest: {
|
||||
number: 123,
|
||||
title: "Fetched Title From GraphQL",
|
||||
body: "PR body",
|
||||
author: { login: "author" },
|
||||
createdAt: "2024-01-15T10:00:00Z",
|
||||
additions: 10,
|
||||
deletions: 5,
|
||||
state: "OPEN",
|
||||
commits: { totalCount: 1, nodes: [] },
|
||||
files: { nodes: [] },
|
||||
comments: { nodes: [] },
|
||||
reviews: { nodes: [] },
|
||||
},
|
||||
},
|
||||
user: { login: "trigger-user" },
|
||||
}),
|
||||
rest: jest.fn() as any,
|
||||
};
|
||||
|
||||
const result = await fetchGitHubData({
|
||||
octokits: mockOctokits as any,
|
||||
repository: "test-owner/test-repo",
|
||||
prNumber: "123",
|
||||
isPR: true,
|
||||
triggerUsername: "trigger-user",
|
||||
});
|
||||
|
||||
expect(result.contextData.title).toBe("Fetched Title From GraphQL");
|
||||
});
|
||||
|
||||
it("should use original title from webhook even if title was edited after trigger", async () => {
|
||||
const mockOctokits = {
|
||||
graphql: jest.fn().mockResolvedValue({
|
||||
repository: {
|
||||
pullRequest: {
|
||||
number: 123,
|
||||
title: "Edited Title (from GraphQL)",
|
||||
body: "PR body",
|
||||
author: { login: "author" },
|
||||
createdAt: "2024-01-15T10:00:00Z",
|
||||
lastEditedAt: "2024-01-15T12:30:00Z", // Edited after trigger
|
||||
additions: 10,
|
||||
deletions: 5,
|
||||
state: "OPEN",
|
||||
commits: { totalCount: 1, nodes: [] },
|
||||
files: { nodes: [] },
|
||||
comments: { nodes: [] },
|
||||
reviews: { nodes: [] },
|
||||
},
|
||||
},
|
||||
user: { login: "trigger-user" },
|
||||
}),
|
||||
rest: jest.fn() as any,
|
||||
};
|
||||
|
||||
const result = await fetchGitHubData({
|
||||
octokits: mockOctokits as any,
|
||||
repository: "test-owner/test-repo",
|
||||
prNumber: "123",
|
||||
isPR: true,
|
||||
triggerUsername: "trigger-user",
|
||||
triggerTime: "2024-01-15T12:00:00Z",
|
||||
originalTitle: "Original Title (from webhook at trigger time)",
|
||||
});
|
||||
|
||||
expect(result.contextData.title).toBe(
|
||||
"Original Title (from webhook at trigger time)",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -28,9 +28,6 @@ describe("formatContext", () => {
additions: 50,
deletions: 30,
state: "OPEN",
labels: {
nodes: [],
},
commits: {
totalCount: 3,
nodes: [],
@@ -66,9 +63,6 @@ Changed Files: 2 files`,
author: { login: "test-user" },
createdAt: "2023-01-01T00:00:00Z",
state: "OPEN",
labels: {
nodes: [],
},
comments: {
nodes: [],
},
@@ -1,77 +0,0 @@
import { describe, test, expect } from "bun:test";
import { extractUserRequest } from "../src/utils/extract-user-request";

describe("extractUserRequest", () => {
test("extracts text after @claude trigger", () => {
expect(extractUserRequest("@claude /review-pr", "@claude")).toBe(
"/review-pr",
);
});

test("extracts slash command with arguments", () => {
expect(
extractUserRequest(
"@claude /review-pr please check the auth module",
"@claude",
),
).toBe("/review-pr please check the auth module");
});

test("handles trigger phrase with extra whitespace", () => {
expect(extractUserRequest("@claude /review-pr", "@claude")).toBe(
"/review-pr",
);
});

test("handles trigger phrase at start of multiline comment", () => {
const comment = `@claude /review-pr
Please review this PR carefully.
Focus on security issues.`;
expect(extractUserRequest(comment, "@claude")).toBe(
`/review-pr
Please review this PR carefully.
Focus on security issues.`,
);
});

test("handles trigger phrase in middle of text", () => {
expect(
extractUserRequest("Hey team, @claude can you review this?", "@claude"),
).toBe("can you review this?");
});

test("returns null for empty comment body", () => {
expect(extractUserRequest("", "@claude")).toBeNull();
});

test("returns null for undefined comment body", () => {
expect(extractUserRequest(undefined, "@claude")).toBeNull();
});

test("returns null when trigger phrase not found", () => {
expect(extractUserRequest("Please review this PR", "@claude")).toBeNull();
});

test("returns null when only trigger phrase with no request", () => {
expect(extractUserRequest("@claude", "@claude")).toBeNull();
});

test("handles custom trigger phrase", () => {
expect(extractUserRequest("/claude help me", "/claude")).toBe("help me");
});

test("handles trigger phrase with special regex characters", () => {
expect(
extractUserRequest("@claude[bot] do something", "@claude[bot]"),
).toBe("do something");
});

test("is case insensitive", () => {
expect(extractUserRequest("@CLAUDE /review-pr", "@claude")).toBe(
"/review-pr",
);
expect(extractUserRequest("@Claude /review-pr", "@claude")).toBe(
"/review-pr",
);
});
});
@@ -1,214 +0,0 @@
|
||||
import { describe, expect, it, beforeAll, afterAll } from "bun:test";
|
||||
import { validatePathWithinRepo } from "../src/mcp/path-validation";
|
||||
import { resolve } from "path";
|
||||
import { mkdir, writeFile, symlink, rm, realpath } from "fs/promises";
|
||||
import { tmpdir } from "os";
|
||||
|
||||
describe("validatePathWithinRepo", () => {
|
||||
// Use a real temp directory for tests that need filesystem access
|
||||
let testDir: string;
|
||||
let repoRoot: string;
|
||||
let outsideDir: string;
|
||||
// Real paths after symlink resolution (e.g., /tmp -> /private/tmp on macOS)
|
||||
let realRepoRoot: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create test directory structure
|
||||
testDir = resolve(tmpdir(), `path-validation-test-${Date.now()}`);
|
||||
repoRoot = resolve(testDir, "repo");
|
||||
outsideDir = resolve(testDir, "outside");
|
||||
|
||||
await mkdir(repoRoot, { recursive: true });
|
||||
await mkdir(resolve(repoRoot, "src"), { recursive: true });
|
||||
await mkdir(outsideDir, { recursive: true });
|
||||
|
||||
// Create test files
|
||||
await writeFile(resolve(repoRoot, "file.txt"), "inside repo");
|
||||
await writeFile(resolve(repoRoot, "src", "main.js"), "console.log('hi')");
|
||||
await writeFile(resolve(outsideDir, "secret.txt"), "sensitive data");
|
||||
|
||||
// Get real paths after symlink resolution
|
||||
realRepoRoot = await realpath(repoRoot);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Cleanup
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe("valid paths", () => {
|
||||
it("should accept simple relative paths", async () => {
|
||||
const result = await validatePathWithinRepo("file.txt", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "file.txt"));
|
||||
});
|
||||
|
||||
it("should accept nested relative paths", async () => {
|
||||
const result = await validatePathWithinRepo("src/main.js", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
|
||||
});
|
||||
|
||||
it("should accept paths with single dot segments", async () => {
|
||||
const result = await validatePathWithinRepo("./src/main.js", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
|
||||
});
|
||||
|
||||
it("should accept paths that use .. but resolve inside repo", async () => {
|
||||
// src/../file.txt resolves to file.txt which is still inside repo
|
||||
const result = await validatePathWithinRepo("src/../file.txt", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "file.txt"));
|
||||
});
|
||||
|
||||
it("should accept absolute paths within the repo root", async () => {
|
||||
const absolutePath = resolve(repoRoot, "file.txt");
|
||||
const result = await validatePathWithinRepo(absolutePath, repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "file.txt"));
|
||||
});
|
||||
|
||||
it("should accept the repo root itself", async () => {
|
||||
const result = await validatePathWithinRepo(".", repoRoot);
|
||||
expect(result).toBe(realRepoRoot);
|
||||
});
|
||||
|
||||
it("should handle new files (non-existent) in valid directories", async () => {
|
||||
const result = await validatePathWithinRepo("src/newfile.js", repoRoot);
|
||||
// For non-existent files, we validate the parent but return the initial path
|
||||
// (can't realpath a file that doesn't exist yet)
|
||||
expect(result).toBe(resolve(repoRoot, "src/newfile.js"));
|
||||
});
|
||||
});
|
||||
|
||||
describe("path traversal attacks", () => {
|
||||
it("should reject simple parent directory traversal", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("../outside/secret.txt", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
|
||||
it("should reject deeply nested parent directory traversal", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("../../../etc/passwd", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
|
||||
it("should reject traversal hidden within path", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("src/../../outside/secret.txt", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
|
||||
it("should reject traversal at the end of path", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("src/../..", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
|
||||
it("should reject absolute paths outside the repo root", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("/etc/passwd", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
|
||||
it("should reject absolute paths to sibling directories", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo(resolve(outsideDir, "secret.txt"), repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("symlink attacks", () => {
|
||||
it("should reject symlinks pointing outside the repo", async () => {
|
||||
// Create a symlink inside the repo that points to a file outside
|
||||
const symlinkPath = resolve(repoRoot, "evil-link");
|
||||
await symlink(resolve(outsideDir, "secret.txt"), symlinkPath);
|
||||
|
||||
try {
|
||||
// The symlink path looks like it's inside the repo, but points outside
|
||||
await expect(
|
||||
validatePathWithinRepo("evil-link", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
} finally {
|
||||
await rm(symlinkPath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("should reject symlinks to parent directories", async () => {
|
||||
// Create a symlink to the parent directory
|
||||
const symlinkPath = resolve(repoRoot, "parent-link");
|
||||
await symlink(testDir, symlinkPath);
|
||||
|
||||
try {
|
||||
await expect(
|
||||
validatePathWithinRepo("parent-link/outside/secret.txt", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
} finally {
|
||||
await rm(symlinkPath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("should accept symlinks that resolve within the repo", async () => {
|
||||
// Create a symlink inside the repo that points to another file inside
|
||||
const symlinkPath = resolve(repoRoot, "good-link");
|
||||
await symlink(resolve(repoRoot, "file.txt"), symlinkPath);
|
||||
|
||||
try {
|
||||
const result = await validatePathWithinRepo("good-link", repoRoot);
|
||||
// Should resolve to the actual file location
|
||||
expect(result).toBe(resolve(realRepoRoot, "file.txt"));
|
||||
} finally {
|
||||
await rm(symlinkPath, { force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("should reject directory symlinks that escape the repo", async () => {
|
||||
// Create a symlink to outside directory
|
||||
const symlinkPath = resolve(repoRoot, "escape-dir");
|
||||
await symlink(outsideDir, symlinkPath);
|
||||
|
||||
try {
|
||||
await expect(
|
||||
validatePathWithinRepo("escape-dir/secret.txt", repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
} finally {
|
||||
await rm(symlinkPath, { force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("edge cases", () => {
|
||||
it("should handle empty path (current directory)", async () => {
|
||||
const result = await validatePathWithinRepo("", repoRoot);
|
||||
expect(result).toBe(realRepoRoot);
|
||||
});
|
||||
|
||||
it("should handle paths with multiple consecutive slashes", async () => {
|
||||
const result = await validatePathWithinRepo("src//main.js", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
|
||||
});
|
||||
|
||||
it("should handle paths with trailing slashes", async () => {
|
||||
const result = await validatePathWithinRepo("src/", repoRoot);
|
||||
expect(result).toBe(resolve(realRepoRoot, "src"));
|
||||
});
|
||||
|
||||
it("should reject prefix attack (repo root as prefix but not parent)", async () => {
|
||||
// Create a sibling directory with repo name as prefix
|
||||
const evilDir = repoRoot + "-evil";
|
||||
await mkdir(evilDir, { recursive: true });
|
||||
await writeFile(resolve(evilDir, "file.txt"), "evil");
|
||||
|
||||
try {
|
||||
await expect(
|
||||
validatePathWithinRepo(resolve(evilDir, "file.txt"), repoRoot),
|
||||
).rejects.toThrow(/resolves outside the repository root/);
|
||||
} finally {
|
||||
await rm(evilDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("should throw error for non-existent repo root", async () => {
|
||||
await expect(
|
||||
validatePathWithinRepo("file.txt", "/nonexistent/repo"),
|
||||
).rejects.toThrow(/does not exist/);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -32,7 +32,6 @@ describe("prepareMcpConfig", () => {
branchPrefix: "",
useStickyComment: false,
useCommitSigning: false,
sshSigningKey: "",
botId: String(CLAUDE_APP_BOT_ID),
botName: CLAUDE_BOT_LOGIN,
allowedBots: "",

@@ -20,7 +20,6 @@ const defaultInputs = {
branchPrefix: "claude/",
useStickyComment: false,
useCommitSigning: false,
sshSigningKey: "",
botId: String(CLAUDE_APP_BOT_ID),
botName: CLAUDE_BOT_LOGIN,
allowedBots: "",
@@ -1,6 +1,7 @@
import { describe, expect, it } from "bun:test";
import { detectMode } from "../../src/modes/detector";
import type { GitHubContext } from "../../src/github/context";
import { isPushEvent } from "../../src/github/context";

describe("detectMode with enhanced routing", () => {
const baseContext = {
@@ -20,7 +21,6 @@ describe("detectMode with enhanced routing", () => {
branchPrefix: "claude/",
useStickyComment: false,
useCommitSigning: false,
sshSigningKey: "",
botId: "123456",
botName: "claude-bot",
allowedBots: "",
@@ -258,4 +258,65 @@ describe("detectMode with enhanced routing", () => {
expect(detectMode(context)).toBe("tag");
});
});

describe("Push Events", () => {
it("should use agent mode for push events", () => {
const context: GitHubContext = {
...baseContext,
eventName: "push",
payload: {} as any,
inputs: { ...baseContext.inputs, prompt: "Merge main into stale PRs" },
};

expect(detectMode(context)).toBe("agent");
});

it("should throw error when track_progress is used with push event", () => {
const context: GitHubContext = {
...baseContext,
eventName: "push",
payload: {} as any,
inputs: { ...baseContext.inputs, trackProgress: true },
};

expect(() => detectMode(context)).toThrow(
/track_progress is only supported /,
);
});
});

describe("isPushEvent type guard", () => {
it("should return true for push events", () => {
const context: GitHubContext = {
...baseContext,
eventName: "push",
payload: {} as any,
};

expect(isPushEvent(context)).toBe(true);
});

it("should return false for non-push events", () => {
const issueContext: GitHubContext = {
...baseContext,
eventName: "issues",
eventAction: "opened",
payload: { issue: { number: 1, body: "Test" } } as any,
entityNumber: 1,
isPR: false,
};

expect(isPushEvent(issueContext)).toBe(false);
});

it("should return false for workflow_dispatch events", () => {
const context: GitHubContext = {
...baseContext,
eventName: "workflow_dispatch",
payload: {} as any,
};

expect(isPushEvent(context)).toBe(false);
});
});
});
@@ -60,6 +60,15 @@ describe("Mode Registry", () => {
expect(mode.name).toBe("agent");
});

test("getMode auto-detects agent for push event", () => {
const pushContext = createMockAutomationContext({
eventName: "push",
});
const mode = getMode(pushContext);
expect(mode).toBe(agentMode);
expect(mode.name).toBe("agent");
});

test("getMode auto-detects agent for repository_dispatch with client_payload", () => {
const contextWithPayload = createMockAutomationContext({
eventName: "repository_dispatch",
@@ -68,7 +68,6 @@ describe("checkWritePermissions", () => {
branchPrefix: "claude/",
useStickyComment: false,
useCommitSigning: false,
sshSigningKey: "",
botId: String(CLAUDE_APP_BOT_ID),
botName: CLAUDE_BOT_LOGIN,
allowedBots: "",

@@ -87,7 +87,6 @@ describe("pull_request_target event support", () => {
},
comments: { nodes: [] },
reviews: { nodes: [] },
labels: { nodes: [] },
},
comments: [],
changedFiles: [],
@@ -1,250 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import {
|
||||
describe,
|
||||
test,
|
||||
expect,
|
||||
afterEach,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
} from "bun:test";
|
||||
import { mkdir, writeFile, rm, readFile, stat } from "fs/promises";
|
||||
import { join } from "path";
|
||||
import { tmpdir } from "os";
|
||||
|
||||
describe("SSH Signing", () => {
|
||||
// Use a temp directory for tests
|
||||
const testTmpDir = join(tmpdir(), "claude-ssh-signing-test");
|
||||
const testSshDir = join(testTmpDir, ".ssh");
|
||||
const testKeyPath = join(testSshDir, "claude_signing_key");
|
||||
const testKey =
|
||||
"-----BEGIN OPENSSH PRIVATE KEY-----\ntest-key-content\n-----END OPENSSH PRIVATE KEY-----";
|
||||
|
||||
beforeAll(async () => {
|
||||
await mkdir(testTmpDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await rm(testTmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up test key if it exists
|
||||
try {
|
||||
await rm(testKeyPath, { force: true });
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
});
|
||||
|
||||
describe("setupSshSigning file operations", () => {
|
||||
test("should write key file atomically with correct permissions", async () => {
|
||||
// Create the directory with secure permissions (same as setupSshSigning does)
|
||||
await mkdir(testSshDir, { recursive: true, mode: 0o700 });
|
||||
|
||||
// Write key atomically with proper permissions (same as setupSshSigning does)
|
||||
await writeFile(testKeyPath, testKey, { mode: 0o600 });
|
||||
|
||||
// Verify key was written
|
||||
const keyContent = await readFile(testKeyPath, "utf-8");
|
||||
expect(keyContent).toBe(testKey);
|
||||
|
||||
// Verify permissions (0o600 = 384 in decimal for permission bits only)
|
||||
const stats = await stat(testKeyPath);
|
||||
const permissions = stats.mode & 0o777; // Get only permission bits
|
||||
expect(permissions).toBe(0o600);
|
||||
});
|
||||
|
||||
test("should create .ssh directory with secure permissions", async () => {
|
||||
// Clean up first
|
||||
await rm(testSshDir, { recursive: true, force: true });
|
||||
|
||||
// Create directory with secure permissions (same as setupSshSigning does)
|
||||
await mkdir(testSshDir, { recursive: true, mode: 0o700 });
|
||||
|
||||
// Verify directory exists
|
||||
const dirStats = await stat(testSshDir);
|
||||
expect(dirStats.isDirectory()).toBe(true);
|
||||
|
||||
// Verify directory permissions
|
||||
const dirPermissions = dirStats.mode & 0o777;
|
||||
expect(dirPermissions).toBe(0o700);
|
||||
});
|
||||
});
|
||||
|
||||
describe("setupSshSigning validation", () => {
|
||||
test("should reject empty SSH key", () => {
|
||||
const emptyKey = "";
|
||||
expect(() => {
|
||||
if (!emptyKey.trim()) {
|
||||
throw new Error("SSH signing key cannot be empty");
|
||||
}
|
||||
}).toThrow("SSH signing key cannot be empty");
|
||||
});
|
||||
|
||||
test("should reject whitespace-only SSH key", () => {
|
||||
const whitespaceKey = " \n\t ";
|
||||
expect(() => {
|
||||
if (!whitespaceKey.trim()) {
|
||||
throw new Error("SSH signing key cannot be empty");
|
||||
}
|
||||
}).toThrow("SSH signing key cannot be empty");
|
||||
});
|
||||
|
||||
test("should reject invalid SSH key format", () => {
|
||||
const invalidKey = "not a valid key";
|
||||
expect(() => {
|
||||
if (
|
||||
!invalidKey.includes("BEGIN") ||
|
||||
!invalidKey.includes("PRIVATE KEY")
|
||||
) {
|
||||
throw new Error("Invalid SSH private key format");
|
||||
}
|
||||
}).toThrow("Invalid SSH private key format");
|
||||
});
|
||||
|
||||
test("should accept valid SSH key format", () => {
|
||||
const validKey =
|
||||
"-----BEGIN OPENSSH PRIVATE KEY-----\nkey-content\n-----END OPENSSH PRIVATE KEY-----";
|
||||
expect(() => {
|
||||
if (!validKey.trim()) {
|
||||
throw new Error("SSH signing key cannot be empty");
|
||||
}
|
||||
if (!validKey.includes("BEGIN") || !validKey.includes("PRIVATE KEY")) {
|
||||
throw new Error("Invalid SSH private key format");
|
||||
}
|
||||
}).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("cleanupSshSigning file operations", () => {
|
||||
test("should remove the signing key file", async () => {
|
||||
// Create the key file first
|
||||
await mkdir(testSshDir, { recursive: true });
|
||||
await writeFile(testKeyPath, testKey, { mode: 0o600 });
|
||||
|
||||
// Verify it exists
|
||||
const existsBefore = await stat(testKeyPath)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(existsBefore).toBe(true);
|
||||
|
||||
// Clean up (same operation as cleanupSshSigning)
|
||||
await rm(testKeyPath, { force: true });
|
||||
|
||||
// Verify it's gone
|
||||
const existsAfter = await stat(testKeyPath)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(existsAfter).toBe(false);
|
||||
});
|
||||
|
||||
test("should not throw if key file does not exist", async () => {
|
||||
// Make sure file doesn't exist
|
||||
await rm(testKeyPath, { force: true });
|
||||
|
||||
// Should not throw (rm with force: true doesn't throw on missing files)
|
||||
await expect(rm(testKeyPath, { force: true })).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("SSH Signing Mode Detection", () => {
|
||||
test("sshSigningKey should take precedence over useCommitSigning", () => {
|
||||
// When both are set, SSH signing takes precedence
|
||||
const sshSigningKey = "test-key";
|
||||
const useCommitSigning = true;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useSshSigning).toBe(true);
|
||||
expect(useApiCommitSigning).toBe(false);
|
||||
});
|
||||
|
||||
test("useCommitSigning should work when sshSigningKey is not set", () => {
|
||||
const sshSigningKey = "";
|
||||
const useCommitSigning = true;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useSshSigning).toBe(false);
|
||||
expect(useApiCommitSigning).toBe(true);
|
||||
});
|
||||
|
||||
test("neither signing method when both are false/empty", () => {
|
||||
const sshSigningKey = "";
|
||||
const useCommitSigning = false;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useSshSigning).toBe(false);
|
||||
expect(useApiCommitSigning).toBe(false);
|
||||
});
|
||||
|
||||
test("git CLI tools should be used when sshSigningKey is set", () => {
|
||||
// This tests the logic in tag mode for tool selection
|
||||
const sshSigningKey = "test-key";
|
||||
const useCommitSigning = true; // Even if this is true
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
// When SSH signing is used, we should use git CLI (not API)
|
||||
const shouldUseGitCli = !useApiCommitSigning;
|
||||
expect(shouldUseGitCli).toBe(true);
|
||||
});
|
||||
|
||||
test("MCP file ops should only be used with API commit signing", () => {
|
||||
// Case 1: API commit signing
|
||||
{
|
||||
const sshSigningKey = "";
|
||||
const useCommitSigning = true;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useApiCommitSigning).toBe(true);
|
||||
}
|
||||
|
||||
// Case 2: SSH signing (should NOT use API)
|
||||
{
|
||||
const sshSigningKey = "test-key";
|
||||
const useCommitSigning = true;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useApiCommitSigning).toBe(false);
|
||||
}
|
||||
|
||||
// Case 3: No signing (should NOT use API)
|
||||
{
|
||||
const sshSigningKey = "";
|
||||
const useCommitSigning = false;
|
||||
|
||||
const useSshSigning = !!sshSigningKey;
|
||||
const useApiCommitSigning = useCommitSigning && !useSshSigning;
|
||||
|
||||
expect(useApiCommitSigning).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("Context parsing", () => {
|
||||
test("sshSigningKey should be parsed from environment", () => {
|
||||
// Test that context.ts parses SSH_SIGNING_KEY correctly
|
||||
const testCases = [
|
||||
{ env: "test-key", expected: "test-key" },
|
||||
{ env: "", expected: "" },
|
||||
{ env: undefined, expected: "" },
|
||||
];
|
||||
|
||||
for (const { env, expected } of testCases) {
|
||||
const result = env || "";
|
||||
expect(result).toBe(expected);
|
||||
}
|
||||
});
|
||||
});
|
||||