Mirror of https://github.com/anthropics/claude-code-action.git (synced 2026-01-25 16:24:12 +08:00)

Compare commits: v1.0.28...claude/aut (13 commits)
| SHA1 |
|---|
| 0085208689 |
| 4778aeae4c |
| b6e5a9f27a |
| 5d91d7d217 |
| 90006bcae7 |
| 005436f51d |
| 1b8ee3b941 |
| c247cb152d |
| cefa60067a |
| 7a708f68fa |
| 5da7ba548c |
| 964b8355fb |
| c83d67a9b9 |
@@ -17,7 +17,6 @@ TASK OVERVIEW:
  1. First, fetch the list of labels available in this repository by running: `gh label list`. Run exactly this command with nothing else.

  2. Next, use gh commands to get context about the issue:

  - Use `gh issue view ${{ github.event.issue.number }}` to retrieve the current issue's details
  - Use `gh search issues` to find similar issues that might provide context for proper categorization
  - You have access to these Bash commands:

@@ -27,7 +26,6 @@ TASK OVERVIEW:
  - Bash(gh search:\*) - to search for similar issues

  3. Analyze the issue content, considering:

  - The issue title and description
  - The type of issue (bug report, feature request, question, etc.)
  - Technical areas mentioned

@@ -36,7 +34,6 @@ TASK OVERVIEW:
  - Components affected

  4. Select appropriate labels from the available labels list provided above:

  - Choose labels that accurately reflect the issue's nature
  - Be specific but comprehensive
  - IMPORTANT: Add a priority label (P1, P2, or P3) based on the label descriptions from gh label list

.github/workflows/issue-triage.yml (vendored, 1 change)

@@ -24,4 +24,5 @@ jobs:
  prompt: "/label-issue REPO: ${{ github.repository }} ISSUE_NUMBER${{ github.event.issue.number }}"
  anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
  allowed_non_write_users: "*" # Required for issue triage workflow, if users without repo write access create issues
+ bypass_write_permission_check_acknowledgment: true # Required when using wildcard
  github_token: ${{ secrets.GITHUB_TOKEN }}
action.yml (12 changes)

@@ -23,6 +23,10 @@ inputs:
  description: "The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format)"
  required: false
  default: "claude/"
+ branch_name_template:
+ description: "Template for branch naming. Available variables: {{prefix}}, {{entityType}}, {{entityNumber}}, {{timestamp}}, {{sha}}, {{label}}, {{description}}. {{label}} will be first label from the issue/PR, or {{entityType}} as a fallback. {{description}} will be the first 5 words of the issue/PR title in kebab-case. Default: '{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}'"
+ required: false
+ default: ""
  allowed_bots:
  description: "Comma-separated list of allowed bot usernames, or '*' to allow all bots. Empty string (default) allows no bots."
  required: false
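For illustration, a minimal workflow snippet using the new `branch_name_template` input might look like the following sketch; the trigger, action reference, and example values are assumptions, not part of this diff:

```yaml
# Hypothetical usage of the branch_name_template input (illustrative only)
jobs:
  claude:
    runs-on: ubuntu-latest
    steps:
      - uses: anthropics/claude-code-action@v1 # assumed action reference
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          # With an issue whose first label is "bug", this could yield e.g. claude/bug-123-20260125-0930
          branch_name_template: "{{prefix}}{{label}}-{{entityNumber}}-{{timestamp}}"
```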
@@ -31,6 +35,10 @@ inputs:
  description: "Comma-separated list of usernames to allow without write permissions, or '*' to allow all users. Only works when github_token input is provided. WARNING: Use with extreme caution - this bypasses security checks and should only be used for workflows with very limited permissions (e.g., issue labeling)."
  required: false
  default: ""
+ bypass_write_permission_check_acknowledgment:
+ description: "REQUIRED when using allowed_non_write_users='*'. Set to 'true' to explicitly acknowledge the security implications of bypassing write permission checks for all users. This flag serves as a safeguard against accidental security misconfigurations."
+ required: false
+ default: "false"

  # Claude Code configuration
  prompt:

@@ -178,9 +186,11 @@ runs:
  LABEL_TRIGGER: ${{ inputs.label_trigger }}
  BASE_BRANCH: ${{ inputs.base_branch }}
  BRANCH_PREFIX: ${{ inputs.branch_prefix }}
+ BRANCH_NAME_TEMPLATE: ${{ inputs.branch_name_template }}
  OVERRIDE_GITHUB_TOKEN: ${{ inputs.github_token }}
  ALLOWED_BOTS: ${{ inputs.allowed_bots }}
  ALLOWED_NON_WRITE_USERS: ${{ inputs.allowed_non_write_users }}
+ BYPASS_WRITE_PERMISSION_CHECK_ACKNOWLEDGMENT: ${{ inputs.bypass_write_permission_check_acknowledgment }}
  GITHUB_RUN_ID: ${{ github.run_id }}
  USE_STICKY_COMMENT: ${{ inputs.use_sticky_comment }}
  DEFAULT_WORKFLOW_TOKEN: ${{ github.token }}
@@ -208,7 +218,7 @@ runs:

  # Install Claude Code if no custom executable is provided
  if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
- CLAUDE_CODE_VERSION="2.0.76"
+ CLAUDE_CODE_VERSION="2.1.6"
  echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
  for attempt in 1 2 3; do
  echo "Installation attempt $attempt..."

@@ -57,7 +57,6 @@ Thank you for your interest in contributing to Claude Code Base Action! This doc
  ```

  This script:

  - Installs `act` if not present (requires Homebrew on macOS)
  - Runs the GitHub Action workflow locally using Docker
  - Requires your `ANTHROPIC_API_KEY` to be set
@@ -85,26 +85,26 @@ Add the following to your workflow file:
(whitespace-only realignment of the table columns)

  ## Inputs

  | Input | Description | Required | Default |
  | --- | --- | --- | --- |
  | `prompt` | The prompt to send to Claude Code | No\* | '' |
  | `prompt_file` | Path to a file containing the prompt to send to Claude Code | No\* | '' |
  | `allowed_tools` | Comma-separated list of allowed tools for Claude Code to use | No | '' |
  | `disallowed_tools` | Comma-separated list of disallowed tools that Claude Code cannot use | No | '' |
  | `max_turns` | Maximum number of conversation turns (default: no limit) | No | '' |
  | `mcp_config` | Path to the MCP configuration JSON file, or MCP configuration JSON string | No | '' |
  | `settings` | Path to Claude Code settings JSON file, or settings JSON string | No | '' |
  | `system_prompt` | Override system prompt | No | '' |
  | `append_system_prompt` | Append to system prompt | No | '' |
  | `claude_env` | Custom environment variables to pass to Claude Code execution (YAML multiline format) | No | '' |
  | `model` | Model to use (provider-specific format required for Bedrock/Vertex) | No | 'claude-4-0-sonnet-20250219' |
  | `anthropic_model` | DEPRECATED: Use 'model' instead | No | 'claude-4-0-sonnet-20250219' |
  | `fallback_model` | Enable automatic fallback to specified model when default model is overloaded | No | '' |
  | `anthropic_api_key` | Anthropic API key (required for direct Anthropic API) | No | '' |
  | `claude_code_oauth_token` | Claude Code OAuth token (alternative to anthropic_api_key) | No | '' |
  | `use_bedrock` | Use Amazon Bedrock with OIDC authentication instead of direct Anthropic API | No | 'false' |
  | `use_vertex` | Use Google Vertex AI with OIDC authentication instead of direct Anthropic API | No | 'false' |
  | `use_node_cache` | Whether to use Node.js dependency caching (set to true only for Node.js projects with lock files) | No | 'false' |
  | `show_full_output` | Show full JSON output (⚠️ May expose secrets - see [security docs](../docs/security.md#️-full-output-security-warning)) | No | 'false'\*\* |

  \*Either `prompt` or `prompt_file` must be provided, but not both.
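As an illustration of the inputs table above, a minimal base-action step might look like the following sketch; the `uses:` path, prompt text, and environment variable are assumptions:

```yaml
# Illustrative only: input names come from the Inputs table above
- name: Run Claude Code
  uses: ./base-action # assumption: adjust to where the base action actually lives
  with:
    prompt: "Summarize the open TODO comments in this repository"
    allowed_tools: "Read,Bash(git log:*)"
    max_turns: "5"
    anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
    claude_env: |
      NODE_ENV: test
```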
@@ -490,7 +490,6 @@ This example shows how to use OIDC authentication with GCP Vertex AI:
  To securely use your Anthropic API key:

  1. Add your API key as a repository secret:

  - Go to your repository's Settings
  - Navigate to "Secrets and variables" → "Actions"
  - Click "New repository secret"

@@ -124,7 +124,7 @@ runs:
  PATH_TO_CLAUDE_CODE_EXECUTABLE: ${{ inputs.path_to_claude_code_executable }}
  run: |
  if [ -z "$PATH_TO_CLAUDE_CODE_EXECUTABLE" ]; then
- CLAUDE_CODE_VERSION="2.0.76"
+ CLAUDE_CODE_VERSION="2.1.6"
  echo "Installing Claude Code v${CLAUDE_CODE_VERSION}..."
  for attempt in 1 2 3; do
  echo "Installation attempt $attempt..."
@@ -6,7 +6,7 @@
  "name": "@anthropic-ai/claude-code-base-action",
  "dependencies": {
  "@actions/core": "^1.10.1",
- "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+ "@anthropic-ai/claude-agent-sdk": "^0.2.6",
  "shell-quote": "^1.8.3",
  },
  "devDependencies": {

@@ -27,7 +27,7 @@

  "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],

- "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
+ "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.6", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-lwswHo6z/Kh9djafk2ajPju62+VqHwJ23gueG1alfaLNK4GRYHgCROfiX6/wlxAd8sRvgTo6ry1hNzkyz7bOpw=="],

  "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],

@@ -11,7 +11,7 @@
  },
  "dependencies": {
  "@actions/core": "^1.10.1",
- "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+ "@anthropic-ai/claude-agent-sdk": "^0.2.6",
  "shell-quote": "^1.8.3"
  },
  "devDependencies": {

@@ -2,6 +2,6 @@
  "name": "mcp-test",
  "version": "1.0.0",
  "dependencies": {
- "@modelcontextprotocol/sdk": "^1.11.0"
+ "@modelcontextprotocol/sdk": "^1.24.0"
  }
  }

bun.lock (4 changes)

@@ -7,7 +7,7 @@
  "dependencies": {
  "@actions/core": "^1.10.1",
  "@actions/github": "^6.0.1",
- "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+ "@anthropic-ai/claude-agent-sdk": "^0.2.6",
  "@modelcontextprotocol/sdk": "^1.11.0",
  "@octokit/graphql": "^8.2.2",
  "@octokit/rest": "^21.1.1",

@@ -37,7 +37,7 @@

  "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],

- "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.1.76", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^3.24.1 || ^4.0.0" } }, "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw=="],
+ "@anthropic-ai/claude-agent-sdk": ["@anthropic-ai/claude-agent-sdk@0.2.6", "", { "optionalDependencies": { "@img/sharp-darwin-arm64": "^0.33.5", "@img/sharp-darwin-x64": "^0.33.5", "@img/sharp-linux-arm": "^0.33.5", "@img/sharp-linux-arm64": "^0.33.5", "@img/sharp-linux-x64": "^0.33.5", "@img/sharp-linuxmusl-arm64": "^0.33.5", "@img/sharp-linuxmusl-x64": "^0.33.5", "@img/sharp-win32-x64": "^0.33.5" }, "peerDependencies": { "zod": "^4.0.0" } }, "sha512-lwswHo6z/Kh9djafk2ajPju62+VqHwJ23gueG1alfaLNK4GRYHgCROfiX6/wlxAd8sRvgTo6ry1hNzkyz7bOpw=="],

  "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
@@ -116,7 +116,6 @@ The `additional_permissions` input allows Claude to access GitHub Actions workfl
  To allow Claude to view workflow run results, job logs, and CI status:

  1. **Grant the necessary permission to your GitHub token**:

  - When using the default `GITHUB_TOKEN`, add the `actions: read` permission to your workflow:

  ```yaml
@@ -228,12 +228,10 @@ jobs:
  The action now automatically detects the appropriate mode:

  1. **If `prompt` is provided** → Runs in **automation mode**

  - Executes immediately without waiting for @claude mentions
  - Perfect for scheduled tasks, PR automation, etc.

  2. **If no `prompt` but @claude is mentioned** → Runs in **interactive mode**

  - Waits for and responds to @claude mentions
  - Creates tracking comments with progress
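A minimal sketch of automation mode, with an assumed schedule trigger, action reference, and prompt text (not part of this diff):

```yaml
# Automation mode: because `prompt` is set, the action runs immediately
on:
  schedule:
    - cron: "0 9 * * 1" # assumed weekly schedule
jobs:
  weekly-summary:
    runs-on: ubuntu-latest
    steps:
      - uses: anthropics/claude-code-action@v1 # assumed action reference
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          prompt: "Summarize open pull requests and flag any that look stale"
# Interactive mode: omit `prompt` and trigger on issue_comment events instead;
# the action then waits for @claude mentions and posts tracking comments.
```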
@@ -7,6 +7,7 @@
  - **⚠️ Non-Write User Access (RISKY)**: The `allowed_non_write_users` parameter allows bypassing the write permission requirement. **This is a significant security risk and should only be used for workflows with extremely limited permissions** (e.g., issue labeling workflows that only have `issues: write` permission). This feature:
  - Only works when `github_token` is provided as input (not with GitHub App authentication)
  - Accepts either a comma-separated list of specific usernames or `*` to allow all users
+ - **When using the wildcard (`*`)**, you MUST also set `bypass_write_permission_check_acknowledgment: true` to explicitly acknowledge the security implications. Without this flag, the action will fail as a safeguard against accidental security misconfigurations
  - **Should be used with extreme caution** as it bypasses the primary security mechanism of this action
  - Is designed for automation workflows where user permissions are already restricted by the workflow's permission scope
  - **Token Permissions**: The GitHub app receives only a short-lived token scoped specifically to the repository it's operating in
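As a sketch of the two configurations described above (the usernames and the workflow shape are placeholders, not part of this diff); only the wildcard form requires the new acknowledgment flag:

```yaml
# Option 1: allow specific non-write users; no acknowledgment flag needed
- uses: anthropics/claude-code-action@v1 # assumed action reference
  with:
    github_token: ${{ secrets.GITHUB_TOKEN }}
    allowed_non_write_users: "external-triager,community-helper" # placeholder usernames

# Option 2: wildcard requires the explicit acknowledgment added in this change
- uses: anthropics/claude-code-action@v1
  with:
    github_token: ${{ secrets.GITHUB_TOKEN }}
    allowed_non_write_users: "*"
    bypass_write_permission_check_acknowledgment: true
```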
@@ -75,14 +76,12 @@ Commits will show as verified and attributed to the GitHub account that owns the
  ```

  2. Add the **public key** to your GitHub account:

  - Go to GitHub → Settings → SSH and GPG keys
  - Click "New SSH key"
  - Select **Key type: Signing Key** (important)
  - Paste the contents of `~/.ssh/signing_key.pub`

  3. Add the **private key** to your repository secrets:

  - Go to your repo → Settings → Secrets and variables → Actions
  - Create a new secret named `SSH_SIGNING_KEY`
  - Paste the contents of `~/.ssh/signing_key`
@@ -31,27 +31,23 @@ The fastest way to create a custom GitHub App is using our pre-configured manife
  **🚀 [Download the Quick Setup Tool](./create-app.html)** (Right-click → "Save Link As" or "Download Linked File")

  After downloading, open `create-app.html` in your web browser:

  - **For Personal Accounts:** Click the "Create App for Personal Account" button
  - **For Organizations:** Enter your organization name and click "Create App for Organization"

  The tool will automatically configure all required permissions and submit the manifest.

  Alternatively, you can use the manifest file directly:

  - Use the [`github-app-manifest.json`](../github-app-manifest.json) file from this repository
  - Visit https://github.com/settings/apps/new (for personal) or your organization's app settings
  - Look for the "Create from manifest" option and paste the JSON content

  2. **Complete the creation flow:**

  - GitHub will show you a preview of the app configuration
  - Confirm the app name (you can customize it)
  - Click "Create GitHub App"
  - The app will be created with all required permissions automatically configured

  3. **Generate and download a private key:**

  - After creating the app, you'll be redirected to the app settings
  - Scroll down to "Private keys"
  - Click "Generate a private key"

@@ -64,7 +60,6 @@ The fastest way to create a custom GitHub App is using our pre-configured manife
  If you prefer to configure the app manually or need custom permissions:

  1. **Create a new GitHub App:**

  - Go to https://github.com/settings/apps (for personal apps) or your organization's settings
  - Click "New GitHub App"
  - Configure the app with these minimum permissions:

@@ -77,19 +72,16 @@ If you prefer to configure the app manually or need custom permissions:
  - Create the app

  2. **Generate and download a private key:**

  - After creating the app, scroll down to "Private keys"
  - Click "Generate a private key"
  - Download the `.pem` file (keep this secure!)

  3. **Install the app on your repository:**

  - Go to the app's settings page
  - Click "Install App"
  - Select the repositories where you want to use Claude

  4. **Add the app credentials to your repository secrets:**

  - Go to your repository's Settings → Secrets and variables → Actions
  - Add these secrets:
  - `APP_ID`: Your GitHub App's ID (found in the app settings)

@@ -138,7 +130,6 @@ For more information on creating GitHub Apps, see the [GitHub documentation](htt
  To securely use your Anthropic API key:

  1. Add your API key as a repository secret:

  - Go to your repository's Settings
  - Navigate to "Secrets and variables" → "Actions"
  - Click "New repository secret"
@@ -52,35 +52,36 @@ jobs:

  ## Inputs

  | Input | Description | Required | Default |
  | --- | --- | --- | --- |
  | `anthropic_api_key` | Anthropic API key (required for direct API, not needed for Bedrock/Vertex) | No\* | - |
  | `claude_code_oauth_token` | Claude Code OAuth token (alternative to anthropic_api_key) | No\* | - |
  | `prompt` | Instructions for Claude. Can be a direct prompt or custom template for automation workflows | No | - |
  | `track_progress` | Force tag mode with tracking comments. Only works with specific PR/issue events. Preserves GitHub context | No | `false` |
  | `include_fix_links` | Include 'Fix this' links in PR code review feedback that open Claude Code with context to fix the identified issue | No | `true` |
  | `claude_args` | Additional [arguments to pass directly to Claude CLI](https://docs.claude.com/en/docs/claude-code/cli-reference#cli-flags) (e.g., `--max-turns 10 --model claude-4-0-sonnet-20250805`) | No | "" |
  | `base_branch` | The base branch to use for creating new branches (e.g., 'main', 'develop') | No | - |
  | `use_sticky_comment` | Use just one comment to deliver PR comments (only applies for pull_request event workflows) | No | `false` |
  | `github_token` | GitHub token for Claude to operate with. **Only include this if you're connecting a custom GitHub app of your own!** | No | - |
  | `use_bedrock` | Use Amazon Bedrock with OIDC authentication instead of direct Anthropic API | No | `false` |
  | `use_vertex` | Use Google Vertex AI with OIDC authentication instead of direct Anthropic API | No | `false` |
  | `assignee_trigger` | The assignee username that triggers the action (e.g. @claude). Only used for issue assignment | No | - |
  | `label_trigger` | The label name that triggers the action when applied to an issue (e.g. "claude") | No | - |
  | `trigger_phrase` | The trigger phrase to look for in comments, issue/PR bodies, and issue titles | No | `@claude` |
  | `branch_prefix` | The prefix to use for Claude branches (defaults to 'claude/', use 'claude-' for dash format) | No | `claude/` |
  | `settings` | Claude Code settings as JSON string or path to settings JSON file | No | "" |
  | `additional_permissions` | Additional permissions to enable. Currently supports 'actions: read' for viewing workflow results | No | "" |
  | `use_commit_signing` | Enable commit signing using GitHub's API. Simple but cannot perform complex git operations like rebasing. See [Security](./security.md#commit-signing) | No | `false` |
  | `ssh_signing_key` | SSH private key for signing commits. Enables signed commits with full git CLI support (rebasing, etc.). See [Security](./security.md#commit-signing) | No | "" |
  | `bot_id` | GitHub user ID to use for git operations (defaults to Claude's bot ID). Required with `ssh_signing_key` for verified commits | No | `41898282` |
  | `bot_name` | GitHub username to use for git operations (defaults to Claude's bot name). Required with `ssh_signing_key` for verified commits | No | `claude[bot]` |
  | `allowed_bots` | Comma-separated list of allowed bot usernames, or '\*' to allow all bots. Empty string (default) allows no bots | No | "" |
  | `allowed_non_write_users` | **⚠️ RISKY**: Comma-separated list of usernames to allow without write permissions, or '\*' for all users. Only works with `github_token` input. See [Security](./security.md) | No | "" |
+ | `bypass_write_permission_check_acknowledgment` | **REQUIRED** when using `allowed_non_write_users='*'`. Set to `true` to explicitly acknowledge security implications. Prevents accidental security misconfigurations | No | `false` |
  | `path_to_claude_code_executable` | Optional path to a custom Claude Code executable. Skips automatic installation. Useful for Nix, custom containers, or specialized environments | No | "" |
  | `path_to_bun_executable` | Optional path to a custom Bun executable. Skips automatic Bun installation. Useful for Nix, custom containers, or specialized environments | No | "" |
  | `plugin_marketplaces` | Newline-separated list of Claude Code plugin marketplace Git URLs to install from (e.g., see example in workflow above). Marketplaces are added before plugin installation | No | "" |
  | `plugins` | Newline-separated list of Claude Code plugin names to install (e.g., see example in workflow above). Plugins are installed before Claude Code execution | No | "" |

  ### Deprecated Inputs
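The workflow example referenced in the `plugins` and `plugin_marketplaces` rows is not part of this excerpt. As an illustration only, the newline-separated values might be supplied like this (the marketplace URL and plugin name are placeholders):

```yaml
- uses: anthropics/claude-code-action@v1 # assumed action reference
  with:
    anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
    plugin_marketplaces: |
      https://github.com/example-org/claude-plugins.git
    plugins: |
      example-plugin
```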
@@ -26,4 +26,5 @@ jobs:

  anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
  allowed_non_write_users: "*" # Required for issue triage workflow, if users without repo write access create issues
+ bypass_write_permission_check_acknowledgment: true # Required when using wildcard
  github_token: ${{ secrets.GITHUB_TOKEN }}
@@ -12,7 +12,7 @@
  "dependencies": {
  "@actions/core": "^1.10.1",
  "@actions/github": "^6.0.1",
- "@anthropic-ai/claude-agent-sdk": "^0.1.76",
+ "@anthropic-ai/claude-agent-sdk": "^0.2.6",
  "@modelcontextprotocol/sdk": "^1.11.0",
  "@octokit/graphql": "^8.2.2",
  "@octokit/rest": "^21.1.1",
@@ -37,6 +37,7 @@ async function run() {
  context,
  context.inputs.allowedNonWriteUsers,
  githubTokenProvided,
+ context.inputs.bypassWritePermissionCheckAcknowledgment,
  );
  if (!hasWritePermissions) {
  throw new Error(
@@ -18,6 +18,11 @@ export const PR_QUERY = `
  additions
  deletions
  state
+ labels(first: 1) {
+ nodes {
+ name
+ }
+ }
  commits(first: 100) {
  totalCount
  nodes {

@@ -101,6 +106,11 @@ export const ISSUE_QUERY = `
  updatedAt
  lastEditedAt
  state
+ labels(first: 1) {
+ nodes {
+ name
+ }
+ }
  comments(first: 100) {
  nodes {
  id
@@ -88,6 +88,7 @@ type BaseContext = {
  labelTrigger: string;
  baseBranch?: string;
  branchPrefix: string;
+ branchNameTemplate?: string;
  useStickyComment: boolean;
  useCommitSigning: boolean;
  sshSigningKey: string;

@@ -95,6 +96,7 @@ type BaseContext = {
  botName: string;
  allowedBots: string;
  allowedNonWriteUsers: string;
+ bypassWritePermissionCheckAcknowledgment: boolean;
  trackProgress: boolean;
  includeFixLinks: boolean;
  };

@@ -145,6 +147,7 @@ export function parseGitHubContext(): GitHubContext {
  labelTrigger: process.env.LABEL_TRIGGER ?? "",
  baseBranch: process.env.BASE_BRANCH,
  branchPrefix: process.env.BRANCH_PREFIX ?? "claude/",
+ branchNameTemplate: process.env.BRANCH_NAME_TEMPLATE,
  useStickyComment: process.env.USE_STICKY_COMMENT === "true",
  useCommitSigning: process.env.USE_COMMIT_SIGNING === "true",
  sshSigningKey: process.env.SSH_SIGNING_KEY || "",

@@ -152,6 +155,8 @@ export function parseGitHubContext(): GitHubContext {
  botName: process.env.BOT_NAME ?? CLAUDE_BOT_LOGIN,
  allowedBots: process.env.ALLOWED_BOTS ?? "",
  allowedNonWriteUsers: process.env.ALLOWED_NON_WRITE_USERS ?? "",
+ bypassWritePermissionCheckAcknowledgment:
+ process.env.BYPASS_WRITE_PERMISSION_CHECK_ACKNOWLEDGMENT === "true",
  trackProgress: process.env.TRACK_PROGRESS === "true",
  includeFixLinks: process.env.INCLUDE_FIX_LINKS === "true",
  },
@@ -3,6 +3,8 @@ import type { Octokits } from "../api/client";
  import { ISSUE_QUERY, PR_QUERY, USER_QUERY } from "../api/queries/github";
  import {
  isIssueCommentEvent,
+ isIssuesEvent,
+ isPullRequestEvent,
  isPullRequestReviewEvent,
  isPullRequestReviewCommentEvent,
  type ParsedGitHubContext,

@@ -40,6 +42,31 @@ export function extractTriggerTimestamp(
  return undefined;
  }

+ /**
+ * Extracts the original title from the GitHub webhook payload.
+ * This is the title as it existed when the trigger event occurred.
+ *
+ * @param context - Parsed GitHub context from webhook
+ * @returns The original title string or undefined if not available
+ */
+ export function extractOriginalTitle(
+ context: ParsedGitHubContext,
+ ): string | undefined {
+ if (isIssueCommentEvent(context)) {
+ return context.payload.issue?.title;
+ } else if (isPullRequestEvent(context)) {
+ return context.payload.pull_request?.title;
+ } else if (isPullRequestReviewEvent(context)) {
+ return context.payload.pull_request?.title;
+ } else if (isPullRequestReviewCommentEvent(context)) {
+ return context.payload.pull_request?.title;
+ } else if (isIssuesEvent(context)) {
+ return context.payload.issue?.title;
+ }
+
+ return undefined;
+ }

  /**
  * Filters comments to only include those that existed in their final state before the trigger time.
  * This prevents malicious actors from editing comments after the trigger to inject harmful content.

@@ -146,6 +173,7 @@ type FetchDataParams = {
  isPR: boolean;
  triggerUsername?: string;
  triggerTime?: string;
+ originalTitle?: string;
  };

  export type GitHubFileWithSHA = GitHubFile & {

@@ -169,6 +197,7 @@ export async function fetchGitHubData({
  isPR,
  triggerUsername,
  triggerTime,
+ originalTitle,
  }: FetchDataParams): Promise<FetchDataResult> {
  const [owner, repo] = repository.split("/");
  if (!owner || !repo) {

@@ -354,6 +383,11 @@ export async function fetchGitHubData({
  triggerDisplayName = await fetchUserDisplayName(octokits, triggerUsername);
  }

+ // Use the original title from the webhook payload if provided
+ if (originalTitle !== undefined) {
+ contextData.title = originalTitle;
+ }
+
  return {
  contextData,
  comments,
@@ -14,7 +14,8 @@ export function formatContext(
  ): string {
  if (isPR) {
  const prData = contextData as GitHubPullRequest;
- return `PR Title: ${prData.title}
+ const sanitizedTitle = sanitizeContent(prData.title);
+ return `PR Title: ${sanitizedTitle}
  PR Author: ${prData.author.login}
  PR Branch: ${prData.headRefName} -> ${prData.baseRefName}
  PR State: ${prData.state}

@@ -24,7 +25,8 @@ Total Commits: ${prData.commits.totalCount}
  Changed Files: ${prData.files.nodes.length} files`;
  } else {
  const issueData = contextData as GitHubIssue;
- return `Issue Title: ${issueData.title}
+ const sanitizedTitle = sanitizeContent(issueData.title);
+ return `Issue Title: ${sanitizedTitle}
  Issue Author: ${issueData.author.login}
  Issue State: ${issueData.state}`;
  }
@@ -6,12 +6,22 @@
  * - For Issues: Create a new branch
  */

+ import { $ } from "bun";
  import { execFileSync } from "child_process";
  import * as core from "@actions/core";
  import type { ParsedGitHubContext } from "../context";
  import type { GitHubPullRequest } from "../types";
  import type { Octokits } from "../api/client";
  import type { FetchDataResult } from "../data/fetcher";
+ import { generateBranchName } from "../../utils/branch-template";
+
+ /**
+ * Extracts the first label from GitHub data, or returns undefined if no labels exist
+ */
+ function extractFirstLabel(githubData: FetchDataResult): string | undefined {
+ const labels = githubData.contextData.labels?.nodes;
+ return labels && labels.length > 0 ? labels[0]?.name : undefined;
+ }

  /**
  * Validates a git branch name against a strict whitelist pattern.

@@ -125,7 +135,7 @@ export async function setupBranch(
  ): Promise<BranchInfo> {
  const { owner, repo } = context.repository;
  const entityNumber = context.entityNumber;
- const { baseBranch, branchPrefix } = context.inputs;
+ const { baseBranch, branchPrefix, branchNameTemplate } = context.inputs;
  const isPR = context.isPR;

  if (isPR) {

@@ -191,17 +201,8 @@ export async function setupBranch(
  // Generate branch name for either an issue or closed/merged PR
  const entityType = isPR ? "pr" : "issue";

- // Create Kubernetes-compatible timestamp: lowercase, hyphens only, shorter format
- const now = new Date();
- const timestamp = `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`;
- // Ensure branch name is Kubernetes-compatible:
- // - Lowercase only
- // - Alphanumeric with hyphens
- // - No underscores
- // - Max 50 chars (to allow for prefixes)
- const branchName = `${branchPrefix}${entityType}-${entityNumber}-${timestamp}`;
- const newBranch = branchName.toLowerCase().substring(0, 50);
+ // Get the SHA of the source branch to use in template
+ let sourceSHA: string | undefined;

  try {
  // Get the SHA of the source branch to verify it exists

@@ -211,8 +212,46 @@ export async function setupBranch(
  ref: `heads/${sourceBranch}`,
  });

- const currentSHA = sourceBranchRef.data.object.sha;
- console.log(`Source branch SHA: ${currentSHA}`);
+ sourceSHA = sourceBranchRef.data.object.sha;
+ console.log(`Source branch SHA: ${sourceSHA}`);
+
+ // Extract first label from GitHub data
+ const firstLabel = extractFirstLabel(githubData);
+
+ // Extract title from GitHub data
+ const title = githubData.contextData.title;
+
+ // Generate branch name using template or default format
+ let newBranch = generateBranchName(
+ branchNameTemplate,
+ branchPrefix,
+ entityType,
+ entityNumber,
+ sourceSHA,
+ firstLabel,
+ title,
+ );
+
+ // Check if generated branch already exists on remote
+ try {
+ await $`git ls-remote --exit-code origin refs/heads/${newBranch}`.quiet();
+
+ // If we get here, branch exists (exit code 0)
+ console.log(
+ `Branch '${newBranch}' already exists, falling back to default format`,
+ );
+ newBranch = generateBranchName(
+ undefined, // Force default template
+ branchPrefix,
+ entityType,
+ entityNumber,
+ sourceSHA,
+ firstLabel,
+ title,
+ );
+ } catch {
+ // Branch doesn't exist (non-zero exit code), continue with generated name
+ }

  // For commit signing, defer branch creation to the file ops server
  if (context.inputs.useCommitSigning) {
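The `generateBranchName` helper itself lives in `src/utils/branch-template` and is not shown in this diff. As a rough sketch only, an implementation consistent with the template variables documented in `action.yml` ({{prefix}}, {{entityType}}, {{entityNumber}}, {{timestamp}}, {{sha}}, {{label}}, {{description}}) could look like the following; the exact sanitization, truncation, and SHA-shortening rules are assumptions:

```typescript
// Hypothetical sketch of generateBranchName; not the repository's actual implementation.
const DEFAULT_TEMPLATE = "{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}";

export function generateBranchName(
  template: string | undefined,
  prefix: string,
  entityType: string,
  entityNumber: number,
  sha?: string,
  label?: string,
  title?: string,
): string {
  const now = new Date();
  const pad = (n: number) => String(n).padStart(2, "0");
  // Same compact timestamp shape the removed inline code produced: YYYYMMDD-HHMM
  const timestamp = `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}-${pad(now.getHours())}${pad(now.getMinutes())}`;
  // {{description}}: first 5 words of the title in kebab-case (per the action.yml description)
  const description = (title ?? "")
    .toLowerCase()
    .split(/\s+/)
    .filter(Boolean)
    .slice(0, 5)
    .join("-")
    .replace(/[^a-z0-9-]/g, "");

  const values: Record<string, string> = {
    prefix,
    entityType,
    entityNumber: String(entityNumber),
    timestamp,
    sha: (sha ?? "").slice(0, 8), // assumption: short SHA
    label: label ?? entityType, // {{label}} falls back to the entity type
    description,
  };

  const name = (template || DEFAULT_TEMPLATE).replace(
    /\{\{(\w+)\}\}/g,
    (_match, key: string) => values[key] ?? "",
  );
  // Keep the result lowercase and git-friendly, as the previous inline logic did
  return name.toLowerCase().replace(/[^a-z0-9\/-]/g, "-").substring(0, 50);
}
```

In the diff above, `setupBranch` calls this helper with the first issue/PR label and the source branch SHA, and retries with the default template when the templated name already exists on the remote.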
@@ -63,6 +63,11 @@ export type GitHubPullRequest = {
  additions: number;
  deletions: number;
  state: string;
+ labels: {
+ nodes: Array<{
+ name: string;
+ }>;
+ };
  commits: {
  totalCount: number;
  nodes: Array<{

@@ -88,6 +93,11 @@ export type GitHubIssue = {
  updatedAt?: string;
  lastEditedAt?: string;
  state: string;
+ labels: {
+ nodes: Array<{
+ name: string;
+ }>;
+ };
  comments: {
  nodes: GitHubComment[];
  };
@@ -8,6 +8,7 @@ import type { Octokit } from "@octokit/rest";
  * @param context - The GitHub context
  * @param allowedNonWriteUsers - Comma-separated list of users allowed without write permissions, or '*' for all
  * @param githubTokenProvided - Whether github_token was provided as input (not from app)
+ * @param bypassAcknowledgment - Explicit acknowledgment required when using wildcard (*)
  * @returns true if the actor has write permissions, false otherwise
  */
  export async function checkWritePermissions(

@@ -15,6 +16,7 @@ export async function checkWritePermissions(
  context: ParsedGitHubContext,
  allowedNonWriteUsers?: string,
  githubTokenProvided?: boolean,
+ bypassAcknowledgment?: boolean,
  ): Promise<boolean> {
  const { repository, actor } = context;

@@ -25,6 +27,17 @@ export async function checkWritePermissions(
  if (allowedNonWriteUsers && githubTokenProvided) {
  const allowedUsers = allowedNonWriteUsers.trim();
  if (allowedUsers === "*") {
+ if (!bypassAcknowledgment) {
+ core.error(
+ `❌ SECURITY ERROR: Attempting to bypass write permission checks for all users with allowed_non_write_users='*' without explicit acknowledgment. ` +
+ `This is a critical security misconfiguration. To proceed, you must set bypass_write_permission_check_acknowledgment='true' ` +
+ `to explicitly acknowledge the security implications.`,
+ );
+ throw new Error(
+ "Cannot bypass write permission checks with wildcard (*) without explicit acknowledgment. " +
+ "Set bypass_write_permission_check_acknowledgment='true' to acknowledge security implications.",
+ );
+ }
  core.warning(
  `⚠️ SECURITY WARNING: Bypassing write permission check for ${actor} due to allowed_non_write_users='*'. This should only be used for workflows with very limited permissions.`,
  );
@@ -4,11 +4,12 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 import { z } from "zod";
 import { readFile, stat } from "fs/promises";
-import { join } from "path";
+import { resolve } from "path";
 import { constants } from "fs";
 import fetch from "node-fetch";
 import { GITHUB_API_URL } from "../github/api/config";
 import { retryWithBackoff } from "../utils/retry";
+import { validatePathWithinRepo } from "./path-validation";

 type GitHubRef = {
   object: {
@@ -213,12 +214,18 @@ server.tool(
       throw new Error("GITHUB_TOKEN environment variable is required");
     }

-    const processedFiles = files.map((filePath) => {
-      if (filePath.startsWith("/")) {
-        return filePath.slice(1);
-      }
-      return filePath;
-    });
+    // Validate all paths are within repository root and get full/relative paths
+    const resolvedRepoDir = resolve(REPO_DIR);
+    const validatedFiles = await Promise.all(
+      files.map(async (filePath) => {
+        const fullPath = await validatePathWithinRepo(filePath, REPO_DIR);
+        // Calculate the relative path for the git tree entry
+        // Use the original filePath (normalized) for the git path, not the symlink-resolved path
+        const normalizedPath = resolve(resolvedRepoDir, filePath);
+        const relativePath = normalizedPath.slice(resolvedRepoDir.length + 1);
+        return { fullPath, relativePath };
+      }),
+    );

     // 1. Get the branch reference (create if doesn't exist)
     const baseSha = await getOrCreateBranchRef(
@@ -247,18 +254,14 @@ server.tool(

     // 3. Create tree entries for all files
     const treeEntries = await Promise.all(
-      processedFiles.map(async (filePath) => {
-        const fullPath = filePath.startsWith("/")
-          ? filePath
-          : join(REPO_DIR, filePath);
-
+      validatedFiles.map(async ({ fullPath, relativePath }) => {
         // Get the proper file mode based on file permissions
         const fileMode = await getFileMode(fullPath);

         // Check if file is binary (images, etc.)
         const isBinaryFile =
           /\.(png|jpg|jpeg|gif|webp|ico|pdf|zip|tar|gz|exe|bin|woff|woff2|ttf|eot)$/i.test(
-            filePath,
+            relativePath,
           );

         if (isBinaryFile) {
@@ -284,7 +287,7 @@ server.tool(
           if (!blobResponse.ok) {
             const errorText = await blobResponse.text();
             throw new Error(
-              `Failed to create blob for ${filePath}: ${blobResponse.status} - ${errorText}`,
+              `Failed to create blob for ${relativePath}: ${blobResponse.status} - ${errorText}`,
             );
           }
@@ -292,7 +295,7 @@ server.tool(

           // Return tree entry with blob SHA
           return {
-            path: filePath,
+            path: relativePath,
             mode: fileMode,
             type: "blob",
             sha: blobData.sha,
@@ -301,7 +304,7 @@ server.tool(
         // For text files, include content directly in tree
         const content = await readFile(fullPath, "utf-8");
         return {
-          path: filePath,
+          path: relativePath,
           mode: fileMode,
           type: "blob",
           content: content,
@@ -421,7 +424,9 @@ server.tool(
             author: newCommitData.author.name,
             date: newCommitData.author.date,
           },
-          files: processedFiles.map((path) => ({ path })),
+          files: validatedFiles.map(({ relativePath }) => ({
+            path: relativePath,
+          })),
           tree: {
             sha: treeData.sha,
           },
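A small sketch of the relative-path computation used for the git tree entries above, with a made-up directory standing in for REPO_DIR:

import { resolve } from "path";

const repoDir = "/tmp/example-checkout"; // assumed example value
const resolvedRepoDir = resolve(repoDir);

// Normalize the requested path against the repo root, then drop the root
// prefix plus the separator to get the path stored in the git tree entry.
const normalizedPath = resolve(resolvedRepoDir, "src/index.ts");
const relativePath = normalizedPath.slice(resolvedRepoDir.length + 1);
console.log(relativePath); // "src/index.ts"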
src/mcp/path-validation.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
import { realpath } from "fs/promises";
import { resolve, sep } from "path";

/**
 * Validates that a file path resolves within the repository root.
 * Prevents path traversal attacks via "../" sequences and symlinks.
 * @param filePath - The file path to validate (can be relative or absolute)
 * @param repoRoot - The repository root directory
 * @returns The resolved absolute path (with symlinks resolved) if valid
 * @throws Error if the path resolves outside the repository root
 */
export async function validatePathWithinRepo(
  filePath: string,
  repoRoot: string,
): Promise<string> {
  // First resolve the path string (handles .. and . segments)
  const initialPath = resolve(repoRoot, filePath);

  // Resolve symlinks to get the real path
  // This prevents symlink attacks where a link inside the repo points outside
  let resolvedRoot: string;
  let resolvedPath: string;

  try {
    resolvedRoot = await realpath(repoRoot);
  } catch {
    throw new Error(`Repository root '${repoRoot}' does not exist`);
  }

  try {
    resolvedPath = await realpath(initialPath);
  } catch {
    // File doesn't exist yet - fall back to checking the parent directory
    // This handles the case where we're creating a new file
    const parentDir = resolve(initialPath, "..");
    try {
      const resolvedParent = await realpath(parentDir);
      if (
        resolvedParent !== resolvedRoot &&
        !resolvedParent.startsWith(resolvedRoot + sep)
      ) {
        throw new Error(
          `Path '${filePath}' resolves outside the repository root`,
        );
      }
      // Parent is valid, return the initial path since file doesn't exist yet
      return initialPath;
    } catch {
      throw new Error(
        `Path '${filePath}' resolves outside the repository root`,
      );
    }
  }

  // Path must be within repo root (or be the root itself)
  if (
    resolvedPath !== resolvedRoot &&
    !resolvedPath.startsWith(resolvedRoot + sep)
  ) {
    throw new Error(`Path '${filePath}' resolves outside the repository root`);
  }

  return resolvedPath;
}
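A brief usage sketch of the new helper (the repo root and import path are stand-ins; the dedicated test file later in this diff exercises the full behavior):

import { validatePathWithinRepo } from "./src/mcp/path-validation"; // path assumed

const repoRoot = process.cwd(); // stand-in for the action's checkout directory

// The repo root itself (or any existing file inside it) resolves to an
// absolute, symlink-resolved path...
const ok = await validatePathWithinRepo(".", repoRoot);

// ...while traversal or symlink escapes are rejected with a descriptive error.
await validatePathWithinRepo("../../etc/passwd", repoRoot).catch((err) =>
  console.error(err.message),
);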
@@ -1,22 +1,33 @@
 export function parseAllowedTools(claudeArgs: string): string[] {
   // Match --allowedTools or --allowed-tools followed by the value
   // Handle both quoted and unquoted values
+  // Use /g flag to find ALL occurrences, not just the first one
   const patterns = [
-    /--(?:allowedTools|allowed-tools)\s+"([^"]+)"/, // Double quoted
-    /--(?:allowedTools|allowed-tools)\s+'([^']+)'/, // Single quoted
-    /--(?:allowedTools|allowed-tools)\s+([^\s]+)/, // Unquoted
+    /--(?:allowedTools|allowed-tools)\s+"([^"]+)"/g, // Double quoted
+    /--(?:allowedTools|allowed-tools)\s+'([^']+)'/g, // Single quoted
+    /--(?:allowedTools|allowed-tools)\s+([^'"\s][^\s]*)/g, // Unquoted (must not start with quote)
   ];

+  const tools: string[] = [];
+  const seen = new Set<string>();
+
   for (const pattern of patterns) {
-    const match = claudeArgs.match(pattern);
-    if (match && match[1]) {
-      // Don't return if the value starts with -- (another flag)
-      if (match[1].startsWith("--")) {
-        return [];
+    for (const match of claudeArgs.matchAll(pattern)) {
+      if (match[1]) {
+        // Don't add if the value starts with -- (another flag)
+        if (match[1].startsWith("--")) {
+          continue;
+        }
+        for (const tool of match[1].split(",")) {
+          const trimmed = tool.trim();
+          if (trimmed && !seen.has(trimmed)) {
+            seen.add(trimmed);
+            tools.push(trimmed);
+          }
+        }
       }
-      return match[1].split(",").map((t) => t.trim());
     }
   }

-  return [];
+  return tools;
 }
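A quick sketch of what the reworked parser now returns; the import path is assumed:

import { parseAllowedTools } from "./src/utils/parse-tools"; // path assumed

const tools = parseAllowedTools(
  "--allowed-tools 'Read,Glob' --allowed-tools 'Glob,Grep'",
);
console.log(tools); // ["Read", "Glob", "Grep"] - all flags collected, duplicates dropped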
@@ -12,6 +12,7 @@ import { prepareMcpConfig } from "../../mcp/install-mcp-server";
 import {
   fetchGitHubData,
   extractTriggerTimestamp,
+  extractOriginalTitle,
 } from "../../github/data/fetcher";
 import { createPrompt, generateDefaultPrompt } from "../../create-prompt";
 import { isEntityContext } from "../../github/context";
@@ -78,6 +79,7 @@ export const tagMode: Mode = {
     const commentId = commentData.id;

     const triggerTime = extractTriggerTimestamp(context);
+    const originalTitle = extractOriginalTitle(context);

     const githubData = await fetchGitHubData({
       octokits: octokit,
@@ -86,6 +88,7 @@ export const tagMode: Mode = {
       isPR: context.isPR,
       triggerUsername: context.actor,
       triggerTime,
+      originalTitle,
     });

     // Setup branch
src/utils/branch-template.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
#!/usr/bin/env bun

/**
 * Branch name template parsing and variable substitution utilities
 */

const NUM_DESCRIPTION_WORDS = 5;

/**
 * Extracts the first 5 words from a title and converts them to kebab-case
 */
function extractDescription(
  title: string,
  numWords: number = NUM_DESCRIPTION_WORDS,
): string {
  if (!title || title.trim() === "") {
    return "";
  }

  return title
    .trim()
    .split(/\s+/)
    .slice(0, numWords) // Only first `numWords` words
    .join("-")
    .toLowerCase()
    .replace(/[^a-z0-9-]/g, "") // Remove non-alphanumeric except hyphens
    .replace(/-+/g, "-") // Replace multiple hyphens with single
    .replace(/^-|-$/g, ""); // Remove leading/trailing hyphens
}

export interface BranchTemplateVariables {
  prefix: string;
  entityType: string;
  entityNumber: number;
  timestamp: string;
  sha?: string;
  label?: string;
  description?: string;
}

/**
 * Replaces template variables in a branch name template
 * Template format: {{variableName}}
 */
export function applyBranchTemplate(
  template: string,
  variables: BranchTemplateVariables,
): string {
  let result = template;

  // Replace each variable
  Object.entries(variables).forEach(([key, value]) => {
    const placeholder = `{{${key}}}`;
    const replacement = value ? String(value) : "";
    result = result.replaceAll(placeholder, replacement);
  });

  return result;
}

/**
 * Generates a branch name from the provided `template` and set of `variables`. Uses a default format if the template is empty or produces an empty result.
 */
export function generateBranchName(
  template: string | undefined,
  branchPrefix: string,
  entityType: string,
  entityNumber: number,
  sha?: string,
  label?: string,
  title?: string,
): string {
  const now = new Date();

  const variables: BranchTemplateVariables = {
    prefix: branchPrefix,
    entityType,
    entityNumber,
    timestamp: `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}${String(now.getDate()).padStart(2, "0")}-${String(now.getHours()).padStart(2, "0")}${String(now.getMinutes()).padStart(2, "0")}`,
    sha: sha?.substring(0, 8), // First 8 characters of SHA
    label: label || entityType, // Fall back to entityType if no label
    description: title ? extractDescription(title) : undefined,
  };

  if (template?.trim()) {
    const branchName = applyBranchTemplate(template, variables);

    // Some templates could produce empty results - validate
    if (branchName.trim().length > 0) return branchName;

    console.log(
      `Branch template '${template}' generated empty result, falling back to default format`,
    );
  }

  const branchName = `${branchPrefix}${entityType}-${entityNumber}-${variables.timestamp}`;
  // Kubernetes compatible: lowercase, max 50 chars, alphanumeric and hyphens only
  return branchName.toLowerCase().substring(0, 50);
}
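Before the test file below, two hedged examples of how these helpers behave (argument values are illustrative):

import { generateBranchName } from "./src/utils/branch-template";

// Custom template: variables are substituted as-is, with no truncation.
generateBranchName("{{prefix}}{{label}}/{{entityNumber}}", "feat/", "issue", 42, undefined, "bug");
// => "feat/bug/42"

// No template: falls back to "<prefix><entityType>-<number>-<timestamp>",
// lowercased and capped at 50 characters.
generateBranchName(undefined, "claude/", "issue", 42);
// => e.g. "claude/issue-42-20240301-1430"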
test/branch-template.test.ts (new file, 247 lines)
@@ -0,0 +1,247 @@
#!/usr/bin/env bun

import { describe, it, expect } from "bun:test";
import {
  applyBranchTemplate,
  generateBranchName,
} from "../src/utils/branch-template";

describe("branch template utilities", () => {
  describe("applyBranchTemplate", () => {
    it("should replace all template variables", () => {
      const template =
        "{{prefix}}{{entityType}}-{{entityNumber}}-{{timestamp}}";
      const variables = {
        prefix: "feat/",
        entityType: "issue",
        entityNumber: 123,
        timestamp: "20240301-1430",
        sha: "abcd1234",
      };

      const result = applyBranchTemplate(template, variables);
      expect(result).toBe("feat/issue-123-20240301-1430");
    });

    it("should handle custom templates with multiple variables", () => {
      const template =
        "{{prefix}}fix/{{entityType}}_{{entityNumber}}_{{timestamp}}_{{sha}}";
      const variables = {
        prefix: "claude-",
        entityType: "pr",
        entityNumber: 456,
        timestamp: "20240301-1430",
        sha: "abcd1234",
      };

      const result = applyBranchTemplate(template, variables);
      expect(result).toBe("claude-fix/pr_456_20240301-1430_abcd1234");
    });

    it("should handle templates with missing variables gracefully", () => {
      const template = "{{prefix}}{{entityType}}-{{missing}}-{{entityNumber}}";
      const variables = {
        prefix: "feat/",
        entityType: "issue",
        entityNumber: 123,
        timestamp: "20240301-1430",
      };

      const result = applyBranchTemplate(template, variables);
      expect(result).toBe("feat/issue-{{missing}}-123");
    });
  });

  describe("generateBranchName", () => {
    it("should use custom template when provided", () => {
      const template = "{{prefix}}custom-{{entityType}}_{{entityNumber}}";
      const result = generateBranchName(template, "feature/", "issue", 123);

      expect(result).toBe("feature/custom-issue_123");
    });

    it("should use default format when template is empty", () => {
      const result = generateBranchName("", "claude/", "issue", 123);

      expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
    });

    it("should use default format when template is undefined", () => {
      const result = generateBranchName(undefined, "claude/", "pr", 456);

      expect(result).toMatch(/^claude\/pr-456-\d{8}-\d{4}$/);
    });

    it("should preserve custom template formatting (no automatic lowercase/truncation)", () => {
      const template = "{{prefix}}UPPERCASE_Branch-Name_{{entityNumber}}";
      const result = generateBranchName(template, "Feature/", "issue", 123);

      expect(result).toBe("Feature/UPPERCASE_Branch-Name_123");
    });

    it("should not truncate custom template results", () => {
      const template =
        "{{prefix}}very-long-branch-name-that-exceeds-the-maximum-allowed-length-{{entityNumber}}";
      const result = generateBranchName(template, "feature/", "issue", 123);

      expect(result).toBe(
        "feature/very-long-branch-name-that-exceeds-the-maximum-allowed-length-123",
      );
    });

    it("should apply Kubernetes-compatible transformations to default template only", () => {
      const result = generateBranchName(undefined, "Feature/", "issue", 123);

      expect(result).toMatch(/^feature\/issue-123-\d{8}-\d{4}$/);
      expect(result.length).toBeLessThanOrEqual(50);
    });

    it("should handle SHA in template", () => {
      const template = "{{prefix}}{{entityType}}-{{entityNumber}}-{{sha}}";
      const result = generateBranchName(
        template,
        "fix/",
        "pr",
        789,
        "abcdef123456",
      );

      expect(result).toBe("fix/pr-789-abcdef12");
    });

    it("should use label in template when provided", () => {
      const template = "{{prefix}}{{label}}/{{entityNumber}}";
      const result = generateBranchName(
        template,
        "feature/",
        "issue",
        123,
        undefined,
        "bug",
      );

      expect(result).toBe("feature/bug/123");
    });

    it("should fallback to entityType when label template is used but no label provided", () => {
      const template = "{{prefix}}{{label}}-{{entityNumber}}";
      const result = generateBranchName(template, "fix/", "pr", 456);

      expect(result).toBe("fix/pr-456");
    });

    it("should handle template with both label and entityType", () => {
      const template = "{{prefix}}{{label}}-{{entityType}}_{{entityNumber}}";
      const result = generateBranchName(
        template,
        "dev/",
        "issue",
        789,
        undefined,
        "enhancement",
      );

      expect(result).toBe("dev/enhancement-issue_789");
    });

    it("should use description in template when provided", () => {
      const template = "{{prefix}}{{description}}/{{entityNumber}}";
      const result = generateBranchName(
        template,
        "feature/",
        "issue",
        123,
        undefined,
        undefined,
        "Fix login bug with OAuth",
      );

      expect(result).toBe("feature/fix-login-bug-with-oauth/123");
    });

    it("should handle template with multiple variables including description", () => {
      const template =
        "{{prefix}}{{label}}/{{description}}-{{entityType}}_{{entityNumber}}";
      const result = generateBranchName(
        template,
        "dev/",
        "issue",
        456,
        undefined,
        "bug",
        "User authentication fails completely",
      );

      expect(result).toBe(
        "dev/bug/user-authentication-fails-completely-issue_456",
      );
    });

    it("should handle description with special characters in template", () => {
      const template = "{{prefix}}{{description}}-{{entityNumber}}";
      const result = generateBranchName(
        template,
        "fix/",
        "pr",
        789,
        undefined,
        undefined,
        "Add: User Registration & Email Validation",
      );

      expect(result).toBe("fix/add-user-registration-email-789");
    });

    it("should truncate descriptions to exactly 5 words", () => {
      const result = generateBranchName(
        "{{prefix}}{{description}}/{{entityNumber}}",
        "feature/",
        "issue",
        999,
        undefined,
        undefined,
        "This is a very long title with many more than five words in it",
      );
      expect(result).toBe("feature/this-is-a-very-long/999");
    });

    it("should handle empty description in template", () => {
      const template = "{{prefix}}{{description}}-{{entityNumber}}";
      const result = generateBranchName(
        template,
        "test/",
        "issue",
        101,
        undefined,
        undefined,
        "",
      );

      expect(result).toBe("test/-101");
    });

    it("should fallback to default format when template produces empty result", () => {
      const template = "{{description}}"; // Will be empty if no title provided
      const result = generateBranchName(template, "claude/", "issue", 123);

      expect(result).toMatch(/^claude\/issue-123-\d{8}-\d{4}$/);
      expect(result.length).toBeLessThanOrEqual(50);
    });

    it("should fallback to default format when template produces only whitespace", () => {
      const template = " {{description}} "; // Will be " " if description is empty
      const result = generateBranchName(
        template,
        "fix/",
        "pr",
        456,
        undefined,
        undefined,
        "",
      );

      expect(result).toMatch(/^fix\/pr-456-\d{8}-\d{4}$/);
      expect(result.length).toBeLessThanOrEqual(50);
    });
  });
});
@@ -61,6 +61,7 @@ describe("generatePrompt", () => {
       body: "This is a test PR",
       author: { login: "testuser" },
       state: "OPEN",
+      labels: { nodes: [] },
       createdAt: "2023-01-01T00:00:00Z",
       additions: 15,
       deletions: 5,
@@ -475,6 +476,7 @@ describe("generatePrompt", () => {
       body: "The login form is not working",
       author: { login: "testuser" },
       state: "OPEN",
+      labels: { nodes: [] },
       createdAt: "2023-01-01T00:00:00Z",
       comments: {
         nodes: [],
@@ -1,6 +1,7 @@
 import { describe, expect, it, jest } from "bun:test";
 import {
   extractTriggerTimestamp,
+  extractOriginalTitle,
   fetchGitHubData,
   filterCommentsToTriggerTime,
   filterReviewsToTriggerTime,
@@ -9,6 +10,7 @@ import {
 import {
   createMockContext,
   mockIssueCommentContext,
+  mockPullRequestCommentContext,
   mockPullRequestReviewContext,
   mockPullRequestReviewCommentContext,
   mockPullRequestOpenedContext,
@@ -63,6 +65,47 @@ describe("extractTriggerTimestamp", () => {
   });
 });

+describe("extractOriginalTitle", () => {
+  it("should extract title from IssueCommentEvent on PR", () => {
+    const title = extractOriginalTitle(mockPullRequestCommentContext);
+    expect(title).toBe("Fix: Memory leak in user service");
+  });
+
+  it("should extract title from PullRequestReviewEvent", () => {
+    const title = extractOriginalTitle(mockPullRequestReviewContext);
+    expect(title).toBe("Refactor: Improve error handling in API layer");
+  });
+
+  it("should extract title from PullRequestReviewCommentEvent", () => {
+    const title = extractOriginalTitle(mockPullRequestReviewCommentContext);
+    expect(title).toBe("Performance: Optimize search algorithm");
+  });
+
+  it("should extract title from pull_request event", () => {
+    const title = extractOriginalTitle(mockPullRequestOpenedContext);
+    expect(title).toBe("Feature: Add user authentication");
+  });
+
+  it("should extract title from issues event", () => {
+    const title = extractOriginalTitle(mockIssueOpenedContext);
+    expect(title).toBe("Bug: Application crashes on startup");
+  });
+
+  it("should return undefined for event without title", () => {
+    const context = createMockContext({
+      eventName: "issue_comment",
+      payload: {
+        comment: {
+          id: 123,
+          body: "test",
+        },
+      } as any,
+    });
+    const title = extractOriginalTitle(context);
+    expect(title).toBeUndefined();
+  });
+});
+
 describe("filterCommentsToTriggerTime", () => {
   const createMockComment = (
     createdAt: string,
@@ -945,4 +988,115 @@ describe("fetchGitHubData integration with time filtering", () => {
     );
     expect(hasPrBodyInMap).toBe(false);
   });
+
+  it("should use originalTitle when provided instead of fetched title", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Fetched Title From GraphQL",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+      originalTitle: "Original Title From Webhook",
+    });
+
+    expect(result.contextData.title).toBe("Original Title From Webhook");
+  });
+
+  it("should use fetched title when originalTitle is not provided", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Fetched Title From GraphQL",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+    });
+
+    expect(result.contextData.title).toBe("Fetched Title From GraphQL");
+  });
+
+  it("should use original title from webhook even if title was edited after trigger", async () => {
+    const mockOctokits = {
+      graphql: jest.fn().mockResolvedValue({
+        repository: {
+          pullRequest: {
+            number: 123,
+            title: "Edited Title (from GraphQL)",
+            body: "PR body",
+            author: { login: "author" },
+            createdAt: "2024-01-15T10:00:00Z",
+            lastEditedAt: "2024-01-15T12:30:00Z", // Edited after trigger
+            additions: 10,
+            deletions: 5,
+            state: "OPEN",
+            commits: { totalCount: 1, nodes: [] },
+            files: { nodes: [] },
+            comments: { nodes: [] },
+            reviews: { nodes: [] },
+          },
+        },
+        user: { login: "trigger-user" },
+      }),
+      rest: jest.fn() as any,
+    };
+
+    const result = await fetchGitHubData({
+      octokits: mockOctokits as any,
+      repository: "test-owner/test-repo",
+      prNumber: "123",
+      isPR: true,
+      triggerUsername: "trigger-user",
+      triggerTime: "2024-01-15T12:00:00Z",
+      originalTitle: "Original Title (from webhook at trigger time)",
+    });
+
+    expect(result.contextData.title).toBe(
+      "Original Title (from webhook at trigger time)",
+    );
+  });
 });
@@ -28,6 +28,9 @@ describe("formatContext", () => {
       additions: 50,
       deletions: 30,
       state: "OPEN",
+      labels: {
+        nodes: [],
+      },
       commits: {
         totalCount: 3,
         nodes: [],
@@ -63,6 +66,9 @@ Changed Files: 2 files`,
       author: { login: "test-user" },
       createdAt: "2023-01-01T00:00:00Z",
       state: "OPEN",
+      labels: {
+        nodes: [],
+      },
       comments: {
         nodes: [],
       },
test/github-file-ops-path-validation.test.ts (new file, 214 lines)
@@ -0,0 +1,214 @@
import { describe, expect, it, beforeAll, afterAll } from "bun:test";
import { validatePathWithinRepo } from "../src/mcp/path-validation";
import { resolve } from "path";
import { mkdir, writeFile, symlink, rm, realpath } from "fs/promises";
import { tmpdir } from "os";

describe("validatePathWithinRepo", () => {
  // Use a real temp directory for tests that need filesystem access
  let testDir: string;
  let repoRoot: string;
  let outsideDir: string;
  // Real paths after symlink resolution (e.g., /tmp -> /private/tmp on macOS)
  let realRepoRoot: string;

  beforeAll(async () => {
    // Create test directory structure
    testDir = resolve(tmpdir(), `path-validation-test-${Date.now()}`);
    repoRoot = resolve(testDir, "repo");
    outsideDir = resolve(testDir, "outside");

    await mkdir(repoRoot, { recursive: true });
    await mkdir(resolve(repoRoot, "src"), { recursive: true });
    await mkdir(outsideDir, { recursive: true });

    // Create test files
    await writeFile(resolve(repoRoot, "file.txt"), "inside repo");
    await writeFile(resolve(repoRoot, "src", "main.js"), "console.log('hi')");
    await writeFile(resolve(outsideDir, "secret.txt"), "sensitive data");

    // Get real paths after symlink resolution
    realRepoRoot = await realpath(repoRoot);
  });

  afterAll(async () => {
    // Cleanup
    await rm(testDir, { recursive: true, force: true });
  });

  describe("valid paths", () => {
    it("should accept simple relative paths", async () => {
      const result = await validatePathWithinRepo("file.txt", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
    });

    it("should accept nested relative paths", async () => {
      const result = await validatePathWithinRepo("src/main.js", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
    });

    it("should accept paths with single dot segments", async () => {
      const result = await validatePathWithinRepo("./src/main.js", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
    });

    it("should accept paths that use .. but resolve inside repo", async () => {
      // src/../file.txt resolves to file.txt which is still inside repo
      const result = await validatePathWithinRepo("src/../file.txt", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
    });

    it("should accept absolute paths within the repo root", async () => {
      const absolutePath = resolve(repoRoot, "file.txt");
      const result = await validatePathWithinRepo(absolutePath, repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "file.txt"));
    });

    it("should accept the repo root itself", async () => {
      const result = await validatePathWithinRepo(".", repoRoot);
      expect(result).toBe(realRepoRoot);
    });

    it("should handle new files (non-existent) in valid directories", async () => {
      const result = await validatePathWithinRepo("src/newfile.js", repoRoot);
      // For non-existent files, we validate the parent but return the initial path
      // (can't realpath a file that doesn't exist yet)
      expect(result).toBe(resolve(repoRoot, "src/newfile.js"));
    });
  });

  describe("path traversal attacks", () => {
    it("should reject simple parent directory traversal", async () => {
      await expect(
        validatePathWithinRepo("../outside/secret.txt", repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });

    it("should reject deeply nested parent directory traversal", async () => {
      await expect(
        validatePathWithinRepo("../../../etc/passwd", repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });

    it("should reject traversal hidden within path", async () => {
      await expect(
        validatePathWithinRepo("src/../../outside/secret.txt", repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });

    it("should reject traversal at the end of path", async () => {
      await expect(
        validatePathWithinRepo("src/../..", repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });

    it("should reject absolute paths outside the repo root", async () => {
      await expect(
        validatePathWithinRepo("/etc/passwd", repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });

    it("should reject absolute paths to sibling directories", async () => {
      await expect(
        validatePathWithinRepo(resolve(outsideDir, "secret.txt"), repoRoot),
      ).rejects.toThrow(/resolves outside the repository root/);
    });
  });

  describe("symlink attacks", () => {
    it("should reject symlinks pointing outside the repo", async () => {
      // Create a symlink inside the repo that points to a file outside
      const symlinkPath = resolve(repoRoot, "evil-link");
      await symlink(resolve(outsideDir, "secret.txt"), symlinkPath);

      try {
        // The symlink path looks like it's inside the repo, but points outside
        await expect(
          validatePathWithinRepo("evil-link", repoRoot),
        ).rejects.toThrow(/resolves outside the repository root/);
      } finally {
        await rm(symlinkPath, { force: true });
      }
    });

    it("should reject symlinks to parent directories", async () => {
      // Create a symlink to the parent directory
      const symlinkPath = resolve(repoRoot, "parent-link");
      await symlink(testDir, symlinkPath);

      try {
        await expect(
          validatePathWithinRepo("parent-link/outside/secret.txt", repoRoot),
        ).rejects.toThrow(/resolves outside the repository root/);
      } finally {
        await rm(symlinkPath, { force: true });
      }
    });

    it("should accept symlinks that resolve within the repo", async () => {
      // Create a symlink inside the repo that points to another file inside
      const symlinkPath = resolve(repoRoot, "good-link");
      await symlink(resolve(repoRoot, "file.txt"), symlinkPath);

      try {
        const result = await validatePathWithinRepo("good-link", repoRoot);
        // Should resolve to the actual file location
        expect(result).toBe(resolve(realRepoRoot, "file.txt"));
      } finally {
        await rm(symlinkPath, { force: true });
      }
    });

    it("should reject directory symlinks that escape the repo", async () => {
      // Create a symlink to outside directory
      const symlinkPath = resolve(repoRoot, "escape-dir");
      await symlink(outsideDir, symlinkPath);

      try {
        await expect(
          validatePathWithinRepo("escape-dir/secret.txt", repoRoot),
        ).rejects.toThrow(/resolves outside the repository root/);
      } finally {
        await rm(symlinkPath, { force: true });
      }
    });
  });

  describe("edge cases", () => {
    it("should handle empty path (current directory)", async () => {
      const result = await validatePathWithinRepo("", repoRoot);
      expect(result).toBe(realRepoRoot);
    });

    it("should handle paths with multiple consecutive slashes", async () => {
      const result = await validatePathWithinRepo("src//main.js", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "src/main.js"));
    });

    it("should handle paths with trailing slashes", async () => {
      const result = await validatePathWithinRepo("src/", repoRoot);
      expect(result).toBe(resolve(realRepoRoot, "src"));
    });

    it("should reject prefix attack (repo root as prefix but not parent)", async () => {
      // Create a sibling directory with repo name as prefix
      const evilDir = repoRoot + "-evil";
      await mkdir(evilDir, { recursive: true });
      await writeFile(resolve(evilDir, "file.txt"), "evil");

      try {
        await expect(
          validatePathWithinRepo(resolve(evilDir, "file.txt"), repoRoot),
        ).rejects.toThrow(/resolves outside the repository root/);
      } finally {
        await rm(evilDir, { recursive: true, force: true });
      }
    });

    it("should throw error for non-existent repo root", async () => {
      await expect(
        validatePathWithinRepo("file.txt", "/nonexistent/repo"),
      ).rejects.toThrow(/does not exist/);
    });
  });
});
@@ -35,12 +35,44 @@ describe("parseAllowedTools", () => {
     expect(parseAllowedTools("")).toEqual([]);
   });

-  test("handles duplicate --allowedTools flags", () => {
+  test("handles --allowedTools followed by another --allowedTools flag", () => {
     const args = "--allowedTools --allowedTools mcp__github__*";
-    // Should not match the first one since the value is another flag
+    // The second --allowedTools is consumed as a value of the first, then skipped.
+    // This is an edge case with malformed input - returns empty.
     expect(parseAllowedTools(args)).toEqual([]);
   });

+  test("parses multiple separate --allowed-tools flags", () => {
+    const args =
+      "--allowed-tools 'mcp__context7__*' --allowed-tools 'Read,Glob' --allowed-tools 'mcp__github_inline_comment__*'";
+    expect(parseAllowedTools(args)).toEqual([
+      "mcp__context7__*",
+      "Read",
+      "Glob",
+      "mcp__github_inline_comment__*",
+    ]);
+  });
+
+  test("parses multiple --allowed-tools flags on separate lines", () => {
+    const args = `--model 'claude-haiku'
+      --allowed-tools 'mcp__context7__*'
+      --allowed-tools 'Read,Glob,Grep'
+      --allowed-tools 'mcp__github_inline_comment__create_inline_comment'`;
+    expect(parseAllowedTools(args)).toEqual([
+      "mcp__context7__*",
+      "Read",
+      "Glob",
+      "Grep",
+      "mcp__github_inline_comment__create_inline_comment",
+    ]);
+  });
+
+  test("deduplicates tools from multiple flags", () => {
+    const args =
+      "--allowed-tools 'Read,Glob' --allowed-tools 'Glob,Grep' --allowed-tools 'Read'";
+    expect(parseAllowedTools(args)).toEqual(["Read", "Glob", "Grep"]);
+  });
+
   test("handles typo --alloedTools", () => {
     const args = "--alloedTools mcp__github__*";
     expect(parseAllowedTools(args)).toEqual([]);
@@ -73,6 +73,7 @@ describe("checkWritePermissions", () => {
       botName: CLAUDE_BOT_LOGIN,
       allowedBots: "",
       allowedNonWriteUsers: "",
+      bypassWritePermissionCheckAcknowledgment: false,
       trackProgress: false,
       includeFixLinks: true,
     },
@@ -197,7 +198,7 @@ describe("checkWritePermissions", () => {
     );
   });

-  test("should bypass permission check for all users with wildcard", async () => {
+  test("should bypass permission check for all users with wildcard when acknowledgment provided", async () => {
     const mockOctokit = createMockOctokit("read");
     const context = createContext();

@@ -206,6 +207,7 @@ describe("checkWritePermissions", () => {
       context,
       "*",
       true,
+      true, // acknowledgment provided
     );

     expect(result).toBe(true);
@@ -214,6 +216,17 @@ describe("checkWritePermissions", () => {
     );
   });

+  test("should FAIL to bypass permission check with wildcard when acknowledgment NOT provided", async () => {
+    const mockOctokit = createMockOctokit("read");
+    const context = createContext();
+
+    await expect(
+      checkWritePermissions(mockOctokit, context, "*", true, false),
+    ).rejects.toThrow(
+      "Cannot bypass write permission checks with wildcard (*) without explicit acknowledgment",
+    );
+  });
+
   test("should NOT bypass permission check when user not in allowed list", async () => {
     const mockOctokit = createMockOctokit("read");
     const context = createContext();
@@ -87,6 +87,7 @@ describe("pull_request_target event support", () => {
         },
         comments: { nodes: [] },
         reviews: { nodes: [] },
+        labels: { nodes: [] },
       },
       comments: [],
       changedFiles: [],