Merged
38 changes: 28 additions & 10 deletions .github/workflows/rust-ci.yml
@@ -153,6 +15 @@ jobs:
targets: ${{ matrix.target }}
components: clippy

- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
shell: bash
run: |
set -euo pipefail
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"

# Explicit cache restore: split cargo home vs target, so we can
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
@@ -164,7 +173,7 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

@@ -201,9 +210,9 @@ jobs:
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
@@ -278,15 +287,15 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}

- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}

- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
@@ -364,6 +373,15 @@ jobs:
with:
targets: ${{ matrix.target }}

- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
shell: bash
run: |
set -euo pipefail
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"

- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@v4
@@ -373,7 +391,7 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

@@ -409,9 +427,9 @@ jobs:
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -436,15 +454,15 @@
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}

- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}

- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
1 change: 1 addition & 0 deletions codex-rs/Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions codex-rs/windows-sandbox-rs/Cargo.toml
@@ -46,3 +46,5 @@ features = [
"Win32_Security_Authentication_Identity",
]
version = "0.52"
[dev-dependencies]
tempfile = "3"
154 changes: 118 additions & 36 deletions codex-rs/windows-sandbox-rs/src/allow.rs
@@ -1,21 +1,33 @@
use crate::policy::SandboxPolicy;
use dunce::canonicalize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;

#[derive(Debug, Default, PartialEq, Eq)]
pub struct AllowDenyPaths {
pub allow: HashSet<PathBuf>,
pub deny: HashSet<PathBuf>,
}

pub fn compute_allow_paths(
policy: &SandboxPolicy,
policy_cwd: &Path,
command_cwd: &Path,
env_map: &HashMap<String, String>,
) -> Vec<PathBuf> {
let mut allow: Vec<PathBuf> = Vec::new();
let mut seen = std::collections::HashSet::new();
) -> AllowDenyPaths {
let mut allow: HashSet<PathBuf> = HashSet::new();
let mut deny: HashSet<PathBuf> = HashSet::new();

let mut add_path = |p: PathBuf| {
if seen.insert(p.to_string_lossy().to_string()) && p.exists() {
allow.push(p);
let mut add_allow_path = |p: PathBuf| {
if p.exists() {
allow.insert(p);
}
};
let mut add_deny_path = |p: PathBuf| {
if p.exists() {
deny.insert(p);
}
};
let include_tmp_env_vars = matches!(
@@ -27,45 +39,72 @@ pub fn compute_allow_paths(
);

if matches!(policy, SandboxPolicy::WorkspaceWrite { .. }) {
add_path(command_cwd.to_path_buf());
if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = policy {
for root in writable_roots {
let add_writable_root =
|root: PathBuf,
policy_cwd: &Path,
add_allow: &mut dyn FnMut(PathBuf),
add_deny: &mut dyn FnMut(PathBuf)| {
let candidate = if root.is_absolute() {
root.clone()
root
} else {
policy_cwd.join(root)
};
let canonical = canonicalize(&candidate).unwrap_or(candidate);
add_path(canonical);
add_allow(canonical.clone());

let git_dir = canonical.join(".git");
if git_dir.is_dir() {
add_deny(git_dir);
}
};

add_writable_root(
command_cwd.to_path_buf(),
policy_cwd,
&mut add_allow_path,
&mut add_deny_path,
);

if let SandboxPolicy::WorkspaceWrite { writable_roots, .. } = policy {
for root in writable_roots {
add_writable_root(
root.clone(),
policy_cwd,
&mut add_allow_path,
&mut add_deny_path,
);
}
}
}
if include_tmp_env_vars {
for key in ["TEMP", "TMP"] {
if let Some(v) = env_map.get(key) {
let abs = PathBuf::from(v);
add_path(abs);
add_allow_path(abs);
} else if let Ok(v) = std::env::var(key) {
let abs = PathBuf::from(v);
add_path(abs);
add_allow_path(abs);
}
}
}
allow
AllowDenyPaths { allow, deny }
}

#[cfg(test)]
mod tests {
use super::compute_allow_paths;
use codex_protocol::protocol::SandboxPolicy;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;
use tempfile::TempDir;

#[test]
fn includes_additional_writable_roots() {
let command_cwd = PathBuf::from(r"C:\Workspace");
let extra_root = PathBuf::from(r"C:\AdditionalWritableRoot");
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let extra_root = tmp.path().join("extra");
let _ = fs::create_dir_all(&command_cwd);
let _ = fs::create_dir_all(&extra_root);

Expand All @@ -76,22 +115,22 @@ mod tests {
exclude_slash_tmp: false,
};

let allow = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());

assert!(
allow.iter().any(|p| p == &command_cwd),
"command cwd should be allowed"
);
assert!(
allow.iter().any(|p| p == &extra_root),
"additional writable root should be allowed"
);
assert!(paths
.allow
.contains(&dunce::canonicalize(&command_cwd).unwrap()));
assert!(paths
.allow
.contains(&dunce::canonicalize(&extra_root).unwrap()));
assert!(paths.deny.is_empty(), "no deny paths expected");
}

#[test]
fn excludes_tmp_env_vars_when_requested() {
let command_cwd = PathBuf::from(r"C:\Workspace");
let temp_dir = PathBuf::from(r"C:\TempDir");
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let temp_dir = tmp.path().join("temp");
let _ = fs::create_dir_all(&command_cwd);
let _ = fs::create_dir_all(&temp_dir);

@@ -104,15 +143,58 @@ mod tests {
let mut env_map = HashMap::new();
env_map.insert("TEMP".into(), temp_dir.to_string_lossy().to_string());

let allow = compute_allow_paths(&policy, &command_cwd, &command_cwd, &env_map);
let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &env_map);

assert!(
allow.iter().any(|p| p == &command_cwd),
"command cwd should be allowed"
);
assert!(
!allow.iter().any(|p| p == &temp_dir),
"TEMP should be excluded when exclude_tmpdir_env_var is true"
);
assert!(paths
.allow
.contains(&dunce::canonicalize(&command_cwd).unwrap()));
assert!(!paths
.allow
.contains(&dunce::canonicalize(&temp_dir).unwrap()));
assert!(paths.deny.is_empty(), "no deny paths expected");
}

#[test]
fn denies_git_dir_inside_writable_root() {
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let git_dir = command_cwd.join(".git");
let _ = fs::create_dir_all(&git_dir);

let policy = SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
network_access: false,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: false,
};

let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
let expected_allow: HashSet<PathBuf> = [dunce::canonicalize(&command_cwd).unwrap()]
.into_iter()
.collect();
let expected_deny: HashSet<PathBuf> = [dunce::canonicalize(&git_dir).unwrap()]
.into_iter()
.collect();

assert_eq!(expected_allow, paths.allow);
assert_eq!(expected_deny, paths.deny);
}

#[test]
fn skips_git_dir_when_missing() {
let tmp = TempDir::new().expect("tempdir");
let command_cwd = tmp.path().join("workspace");
let _ = fs::create_dir_all(&command_cwd);

let policy = SandboxPolicy::WorkspaceWrite {
writable_roots: vec![],
network_access: false,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: false,
};

let paths = compute_allow_paths(&policy, &command_cwd, &command_cwd, &HashMap::new());
assert_eq!(paths.allow.len(), 1);
assert!(paths.deny.is_empty(), "no deny when .git is absent");
}
}
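
For orientation, here is a minimal sketch (not from this PR) of how code inside the crate might consume the new AllowDenyPaths result. The policy fields mirror the tests above; the function name describe_workspace_acls is illustrative, and the println! calls stand in for the real ACL helpers (add_allow_ace / add_deny_write_ace) applied in lib.rs below.

// Illustrative sketch only; assumes the crate layout shown in this diff.
use std::collections::HashMap;
use std::path::Path;

use codex_protocol::protocol::SandboxPolicy;

use crate::allow::compute_allow_paths;

fn describe_workspace_acls(workspace: &Path) {
    // Writable workspace with no extra roots; a `.git` directory inside it
    // lands in the deny set so sandboxed commands cannot rewrite git metadata.
    let policy = SandboxPolicy::WorkspaceWrite {
        writable_roots: vec![],
        network_access: false,
        exclude_tmpdir_env_var: true,
        exclude_slash_tmp: false,
    };

    // Only paths that exist on disk are returned; compute_allow_paths checks
    // `p.exists()` before inserting into either set.
    let paths = compute_allow_paths(&policy, workspace, workspace, &HashMap::new());

    for p in &paths.allow {
        println!("allow write: {}", p.display());
    }
    for p in &paths.deny {
        println!("deny write:  {}", p.display());
    }
}

Keeping allow and deny as separate sets lets a caller apply them independently, which is how lib.rs below grants write ACEs on the allow paths and a deny-write ACE on each .git directory.
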
14 changes: 11 additions & 3 deletions codex-rs/windows-sandbox-rs/src/lib.rs
@@ -23,9 +23,11 @@ pub use stub::CaptureResult;
#[cfg(target_os = "windows")]
mod windows_impl {
use super::acl::add_allow_ace;
use super::acl::add_deny_write_ace;
use super::acl::allow_null_device;
use super::acl::revoke_ace;
use super::allow::compute_allow_paths;
use super::allow::AllowDenyPaths;
use super::cap::cap_sid_file;
use super::cap::load_or_create_cap_sids;
use super::env::apply_no_network_to_env;
@@ -195,8 +197,6 @@ mod windows_impl {
ensure_codex_home_exists(codex_home)?;

let current_dir = cwd.to_path_buf();
// for now, don't fail if we detect world-writable directories
// audit::audit_everyone_writable(&current_dir, &env_map)?;
let logs_base_dir = Some(codex_home);
log_start(&command, logs_base_dir);
let cap_sid_path = cap_sid_file(codex_home);
@@ -238,7 +238,8 @@ }
}

let persist_aces = is_workspace_write;
let allow = compute_allow_paths(&policy, sandbox_policy_cwd, &current_dir, &env_map);
let AllowDenyPaths { allow, deny } =
compute_allow_paths(&policy, sandbox_policy_cwd, &current_dir, &env_map);
let mut guards: Vec<(PathBuf, *mut c_void)> = Vec::new();
unsafe {
for p in &allow {
@@ -254,6 +255,13 @@ mod windows_impl {
}
}
}
for p in &deny {
if let Ok(added) = add_deny_write_ace(p, psid_to_use) {
if added && !persist_aces {
guards.push((p.clone(), psid_to_use));
}
}
}
allow_null_device(psid_to_use);
}
