1,159 changes: 1,159 additions & 0 deletions .github/workflows/refiner.lock.yml

Large diffs are not rendered by default.

402 changes: 402 additions & 0 deletions .github/workflows/refiner.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion .github/workflows/smoke-copilot.lock.yml

Some generated files are not rendered by default.

14 changes: 7 additions & 7 deletions actions/setup/js/merge_remote_agent_github_folder.cjs
@@ -146,23 +146,23 @@ function validateGitParameter(value, name) {
  */
 function validateSafePath(userPath, basePath, name) {
   // Reject paths with null bytes
-  if (userPath.includes('\0')) {
+  if (userPath.includes("\0")) {
     throw new Error(`Invalid ${name}: contains null bytes`);
   }
 
   // Reject paths that attempt to traverse up (..)
-  if (userPath.includes('..')) {
+  if (userPath.includes("..")) {
     throw new Error(`Invalid ${name}: path traversal detected`);
   }
 
   // Resolve the full path and ensure it's within the base path
   const resolvedPath = path.resolve(basePath, userPath);
   const resolvedBase = path.resolve(basePath);
 
   if (!resolvedPath.startsWith(resolvedBase + path.sep) && resolvedPath !== resolvedBase) {
     throw new Error(`Invalid ${name}: path escapes base directory`);
   }
 
   return resolvedPath;
 }

@@ -239,7 +239,7 @@ function mergeGithubFolder(sourcePath, destPath) {
   for (const relativePath of sourceFiles) {
     // Validate relative path to prevent path traversal
     validateSafePath(relativePath, sourcePath, "relative file path");
 
     // Check if the file is in one of the allowed subfolders
     const pathParts = relativePath.split(path.sep);
     const topLevelFolder = pathParts[0];
3 changes: 2 additions & 1 deletion actions/setup/js/safe_output_handler_manager.cjs
@@ -768,7 +768,8 @@ async function main() {
         core.warning(`✗ Failed to submit PR review: ${reviewResult.error}`);
       }
     } catch (reviewError) {
-      core.warning(`✗ Exception while submitting PR review: ${reviewError.message || reviewError}`);
+      const errorMessage = reviewError instanceof Error ? reviewError.message : String(reviewError);
+      core.warning(`✗ Exception while submitting PR review: ${errorMessage}`);
     }
   }
 
3 changes: 2 additions & 1 deletion actions/setup/js/safe_output_unified_handler_manager.cjs
@@ -970,7 +970,8 @@ async function main() {
         core.warning(`✗ Failed to submit PR review: ${reviewResult.error}`);
       }
     } catch (reviewError) {
-      core.warning(`✗ Exception while submitting PR review: ${reviewError.message || reviewError}`);
+      const errorMessage = reviewError instanceof Error ? reviewError.message : String(reviewError);
+      core.warning(`✗ Exception while submitting PR review: ${errorMessage}`);
     }
   }
 
37 changes: 19 additions & 18 deletions actions/setup/js/validate_memory_files.cjs
@@ -4,72 +4,73 @@
 const fs = require("fs");
 const path = require("path");
 
+/**
+ * @typedef {Object} ValidationResult
+ * @property {boolean} valid - Whether all files passed validation
+ * @property {string[]} invalidFiles - List of files with invalid extensions
+ */
+
 /**
  * Validate that all files in a memory directory have allowed file extensions
  * If allowedExtensions is empty or not provided, all file extensions are allowed
  *
  * @param {string} memoryDir - Path to the memory directory to validate
- * @param {string} memoryType - Type of memory ("cache" or "repo") for error messages
+ * @param {string} [memoryType="cache"] - Type of memory ("cache" or "repo") for error messages
  * @param {string[]} [allowedExtensions] - Optional custom list of allowed extensions (empty array or undefined means allow all files)
- * @returns {{valid: boolean, invalidFiles: string[]}} Validation result with list of invalid files
+ * @returns {ValidationResult} Validation result with list of invalid files
  */
 function validateMemoryFiles(memoryDir, memoryType = "cache", allowedExtensions) {
   // If allowedExtensions is not provided, undefined, or empty array, allow all files
-  const allowAll = !allowedExtensions || allowedExtensions.length === 0;
+  const allowAll = !allowedExtensions?.length;
 
   // If allowing all files, skip validation
   if (allowAll) {
     core.info(`All file extensions are allowed in ${memoryType}-memory directory`);
     return { valid: true, invalidFiles: [] };
   }
 
-  // Normalize extensions to lowercase and trim whitespace
-  const extensions = allowedExtensions.map(ext => ext.trim().toLowerCase());
-  const invalidFiles = [];
-
   // Check if directory exists
   if (!fs.existsSync(memoryDir)) {
     core.info(`Memory directory does not exist: ${memoryDir}`);
     return { valid: true, invalidFiles: [] };
   }
 
+  const extensions = allowedExtensions.map(ext => ext.trim().toLowerCase());
+  const invalidFiles = [];
+
   /**
    * Recursively scan directory for files
    * @param {string} dirPath - Directory to scan
-   * @param {string} relativePath - Relative path from memory directory
+   * @param {string} [relativePath=""] - Relative path from memory directory
    */
-  function scanDirectory(dirPath, relativePath = "") {
+  const scanDirectory = (dirPath, relativePath = "") => {
     const entries = fs.readdirSync(dirPath, { withFileTypes: true });
 
     for (const entry of entries) {
       const fullPath = path.join(dirPath, entry.name);
       const relativeFilePath = relativePath ? path.join(relativePath, entry.name) : entry.name;
 
       if (entry.isDirectory()) {
         // Recursively scan subdirectory
         scanDirectory(fullPath, relativeFilePath);
       } else if (entry.isFile()) {
         // Check file extension
         const ext = path.extname(entry.name).toLowerCase();
         if (!extensions.includes(ext)) {
           invalidFiles.push(relativeFilePath);
         }
       }
     }
-  }
+  };
 
   try {
     scanDirectory(memoryDir);
   } catch (error) {
-    core.error(`Failed to scan ${memoryType}-memory directory: ${error instanceof Error ? error.message : String(error)}`);
+    const message = error instanceof Error ? error.message : String(error);
+    core.error(`Failed to scan ${memoryType}-memory directory: ${message}`);
     return { valid: false, invalidFiles: [] };
   }
 
   if (invalidFiles.length > 0) {
     core.error(`Found ${invalidFiles.length} file(s) with invalid extensions in ${memoryType}-memory:`);
     invalidFiles.forEach(file => {
-      const ext = path.extname(file).toLowerCase();
-      core.error(` - ${file} (extension: ${ext || "(no extension)"})`);
+      const ext = path.extname(file).toLowerCase() || "(no extension)";
+      core.error(` - ${file} (extension: ${ext})`);
     });
     core.error(`Allowed extensions: ${extensions.join(", ")}`);
     return { valid: false, invalidFiles };
14 changes: 14 additions & 0 deletions docs/src/content/docs/reference/safe-outputs.md
@@ -52,6 +52,7 @@ The agent requests issue creation; a separate job with `issues: write` creates i
 - [**Assign Milestone**](#assign-milestone-assign-milestone) (`assign-milestone`) - Assign issues to milestones (max: 1)
 - [**Assign to Agent**](#assign-to-agent-assign-to-agent) (`assign-to-agent`) - Assign Copilot agents to issues or PRs (max: 1)
 - [**Assign to User**](#assign-to-user-assign-to-user) (`assign-to-user`) - Assign users to issues (max: 1)
+- [**Unassign from User**](#unassign-from-user-unassign-from-user) (`unassign-from-user`) - Remove user assignments from issues or PRs (max: 1)
 
 ### Projects, Releases & Assets
 
@@ -1279,6 +1280,19 @@ safe-outputs:
     target-repo: "owner/repo" # cross-repository
 ```
 
+### Unassign from User (`unassign-from-user:`)
+
+Removes user assignments from issues or pull requests. Restrict with `allowed` list to control which users can be unassigned. Target: `"triggering"` (issue/PR event), `"*"` (any), or number.
+
+```yaml wrap
+safe-outputs:
+  unassign-from-user:
+    allowed: [user1, user2] # restrict to specific users
+    max: 1 # max unassignments (default: 1)
+    target: "*" # "triggering" (default), "*", or number
+    target-repo: "owner/repo" # cross-repository
+```
+
 ## Cross-Repository Operations
 
 Many safe outputs support `target-repo`. Requires a PAT (`github-token` or `GH_AW_GITHUB_TOKEN`); the default `GITHUB_TOKEN` is current-repo only. Use specific names (no wildcards).
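To make the cross-repository note above concrete, here is a minimal sketch of what such a configuration could look like. It is illustrative only: it assumes `github-token` can be set at the `safe-outputs:` level, that `create-issue` is among the outputs accepting `target-repo`, and that a PAT is stored in a secret named `GH_AW_GITHUB_TOKEN`; the repository name is hypothetical.

```yaml
# Minimal sketch only; key placement and the repository name are assumptions.
safe-outputs:
  # PAT with access to the target repository (the default GITHUB_TOKEN is current-repo only).
  github-token: ${{ secrets.GH_AW_GITHUB_TOKEN }}
  create-issue:
    target-repo: "octo-org/tracker" # specific repository name; wildcards are not supported
    max: 1
```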
11 changes: 9 additions & 2 deletions pkg/cli/logs_extract_zip_test.go
@@ -209,14 +209,21 @@ func TestExtractZipFileErrorHandling(t *testing.T) {
 		zipReader, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
 		require.NoError(t, err)
 
-		// Extract to a read-only destination (will fail on create)
+		// Extract to a read-only destination (will fail on create in most environments).
+		//
+		// Note: When running as root (or with elevated permissions), creating files inside a
+		// 0555 directory may still succeed depending on the platform and filesystem.
+		// In that case, we skip this assertion rather than make the suite flaky.
 		readOnlyDir := filepath.Join(tempDir, "readonly")
 		err = os.MkdirAll(readOnlyDir, 0555) // Read-only directory
 		require.NoError(t, err)
 
 		// Try to extract - should fail and return error
 		err = extractZipFile(zipReader.File[0], readOnlyDir, false)
-		require.Error(t, err, "extractZipFile should return error when destination is read-only")
+		if err == nil {
+			// Likely running with elevated privileges.
+			t.Skip("expected extraction to fail in read-only directory, but it succeeded (likely elevated privileges)")
+		}
 		assert.Contains(t, err.Error(), "failed to create", "Error should mention creation failure")
 	})
 