diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index a99715c..0495421 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -1,4 +1,3 @@ -# This file was initially autogenerated by maturin v1.5.0 name: Build and Test on: @@ -16,7 +15,36 @@ permissions: contents: read jobs: + code_checks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.10' + - name: Run pre-commit checks + run: | + pip install pre-commit + pre-commit run --all-files + + sdist: + needs: [code_checks] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build sdist + uses: PyO3/maturin-action@v1 + with: + command: sdist + args: --out dist + - name: Upload sdist + uses: actions/upload-artifact@v4 + with: + name: wheels-sdist + path: dist + linux: + needs: [code_checks] runs-on: ubuntu-latest strategy: matrix: @@ -34,7 +62,7 @@ jobs: with: target: ${{ matrix.target }} args: --release --out dist --find-interpreter - # sccache: 'true' + # sccache: 'true' # Disabled due to issues with sccache on linux manylinux: auto before-script-linux: | # If we're running on rhel centos, install needed packages. thx s3rius! 
@@ -49,37 +77,15 @@ jobs: with: name: wheels-linux-${{ matrix.target }} path: dist - # - name: Setup Docker - # run: | - # sudo apt-get install apt-transport-https ca-certificates curl software-properties-common - # curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - - # sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - # sudo apt-get install docker-ce - name: Install and Test - if: ${{ startsWith(matrix.target, 'x86_64') }} shell: bash run: | set -e pip install .[dev] --find-links dist --force-reinstall pytest -v tests/ - # - name: Install and Test - # if: ${{ !startsWith(matrix.target, 'x86') && matrix.target != 'ppc64' }} - # uses: uraimo/run-on-arch-action@v2.5.0 - # with: - # arch: ${{ matrix.target }} - # distro: ubuntu22.04 - # githubToken: ${{ github.token }} - # install: | - # apt-get update - # apt-get install -y --no-install-recommends python3 python3-pip - # pip3 install -U pip pytest - # run: | - # set -e - # pip3 install .[dev] --find-links dist --force-reinstall - # pip3 install pytest docker - # pytest -v tests/ windows: + needs: [code_checks] runs-on: windows-latest strategy: matrix: @@ -90,6 +96,12 @@ jobs: with: python-version: '3.10' architecture: ${{ matrix.target }} + - name: Set Perl environment variables + run: | + echo "PERL=$((where.exe perl)[0])" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + echo "OPENSSL_SRC_PERL=$((where.exe perl)[0])" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + - name: Remove wincred from docker config + run: sed -i '/wincred/d' ~/.docker/config.json - name: Build wheels uses: PyO3/maturin-action@v1 with: @@ -101,24 +113,25 @@ jobs: with: name: wheels-windows-${{ matrix.target }} path: dist - # - name: Set up Docker Buildx - # uses: docker/setup-buildx-action@v3 - # - name: Install and Test - # if: ${{ !startsWith(matrix.target, 'aarch64') }} + - name: Install + shell: bash + run: pip install .[dev] 
--find-links dist --force-reinstall + # Disabled on windows due to issues with docker + # BuildError: no matching manifest for windows/amd64 ... in the manifest list entries + # - name: Test + # if: ${{ !startsWith(matrix.target, 'x86') }} # shell: bash - # run: | - # set -e - # pip install .[dev] --find-links dist --force-reinstall - # pip install pytest docker - # pytest -v tests/ + # run: pytest -v tests/ macos: + needs: [code_checks] runs-on: macos-latest strategy: matrix: target: [x86_64, aarch64] env: OPENSSL_NO_VENDOR: 1 + DOCKER_HOST: unix:///var/run/docker.sock steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 @@ -137,27 +150,15 @@ jobs: with: name: wheels-macos-${{ matrix.target }} path: dist - # - name: Install and Test - # if: ${{ !startsWith(matrix.target, 'aarch64') }} - # shell: bash + # - name: setup-docker # run: | - # set -e - # pip install .[dev] --find-links dist --force-reinstall - # pip install pytest docker pexpect - # pytest -v tests/ - - sdist: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Build sdist - uses: PyO3/maturin-action@v1 - with: - command: sdist - args: --out dist - - name: Upload sdist - uses: actions/upload-artifact@v4 - with: - name: wheels-sdist - path: dist - + # brew install docker + # open --background -a Docker + # while ! 
docker info &> /dev/null; do sleep 1; done + - name: Install + shell: bash + run: pip install .[dev] --find-links dist --force-reinstall + # Disabled on macos due to issues with docker + # - name: Test + # shell: bash + # run: pytest -v tests/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..f3e7736 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +# checks and formatting for rust code +repos: +- repo: local + hooks: + - id: cargo-fmt + name: cargo fmt + entry: cargo fmt -- + language: system + types: [rust] + pass_filenames: false +- repo: https://github.com/doublify/pre-commit-rust + rev: v1.0 + hooks: + - id: clippy + +# checks and formatting for python code +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.3.4 + hooks: + - id: ruff-format # Formatter + - id: ruff # Linter + args: [--fix, --exit-non-zero-on-fix] diff --git a/README.md b/README.md index 5104eac..4c372d4 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Hussh: SSH for humans. [![image](https://img.shields.io/pypi/v/hussh.svg)](https://pypi.python.org/pypi/hussh) [![image](https://img.shields.io/pypi/pyversions/hussh.svg)](https://pypi.python.org/pypi/hussh) +![PyPI - Wheel](https://img.shields.io/pypi/wheel/hussh) [![Actions status](https://github.com/jacobcallahan/hussh/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/jacobcallahan/hussh/actions) Hussh (pronounced "hush") is a client-side ssh library that offers low level performance through a high level interface. 
diff --git a/benchmarks/bench_fabric.py b/benchmarks/bench_fabric.py index 01de77c..a231d79 100644 --- a/benchmarks/bench_fabric.py +++ b/benchmarks/bench_fabric.py @@ -1,8 +1,9 @@ import json -import memray -import timeit -from pprint import pprint from pathlib import Path +from pprint import pprint +import timeit + +import memray results_dict = {} @@ -71,7 +72,7 @@ pprint(results_dict, sort_dicts=False) if Path("bench_results.json").exists(): - results = json.loads(Path("bench_results.json").read_text()) + results = json.loads(Path("bench_results.json").read_text()) else: results = {} results.update({"fabric": results_dict}) diff --git a/benchmarks/bench_hussh.py b/benchmarks/bench_hussh.py index 3f4623b..9ee174f 100644 --- a/benchmarks/bench_hussh.py +++ b/benchmarks/bench_hussh.py @@ -1,8 +1,9 @@ import json -import memray -import timeit -from pprint import pprint from pathlib import Path +from pprint import pprint +import timeit + +import memray results_dict = {} @@ -11,6 +12,7 @@ with memray.Tracker("memray-bench_hussh.bin"): start_time = timeit.default_timer() from hussh import Connection + results_dict["import_time"] = f"{(timeit.default_timer() - start_time) * 1000:.2f} ms" host_info = json.loads(Path("target.json").read_text()) @@ -62,7 +64,7 @@ pprint(results_dict, sort_dicts=False) if Path("bench_results.json").exists(): - results = json.loads(Path("bench_results.json").read_text()) + results = json.loads(Path("bench_results.json").read_text()) else: results = {} results.update({"hussh": results_dict}) diff --git a/benchmarks/bench_paramiko.py b/benchmarks/bench_paramiko.py index ee922c0..8c4db5b 100644 --- a/benchmarks/bench_paramiko.py +++ b/benchmarks/bench_paramiko.py @@ -1,8 +1,9 @@ import json -import memray -import timeit -from pprint import pprint from pathlib import Path +from pprint import pprint +import timeit + +import memray results_dict = {} @@ -11,6 +12,7 @@ with memray.Tracker("memray-bench_paramiko.bin"): start_time = 
timeit.default_timer() import paramiko + results_dict["import_time"] = f"{(timeit.default_timer() - start_time) * 1000:.2f} ms" host_info = json.loads(Path("target.json").read_text()) @@ -33,7 +35,6 @@ result = stdout.read() results_dict["cmd_time"] = f"{(timeit.default_timer() - temp_time) * 1000:.2f} ms" - # small file (1kb) temp_time = timeit.default_timer() sftp = ssh.open_sftp() @@ -73,7 +74,7 @@ pprint(results_dict, sort_dicts=False) if Path("bench_results.json").exists(): - results = json.loads(Path("bench_results.json").read_text()) + results = json.loads(Path("bench_results.json").read_text()) else: results = {} results.update({"paramiko": results_dict}) diff --git a/benchmarks/bench_ssh2-python.py b/benchmarks/bench_ssh2-python.py index a060a99..295f91e 100644 --- a/benchmarks/bench_ssh2-python.py +++ b/benchmarks/bench_ssh2-python.py @@ -1,8 +1,9 @@ import json -import memray -import timeit -from pprint import pprint from pathlib import Path +from pprint import pprint +import timeit + +import memray results_dict = {} @@ -11,8 +12,10 @@ with memray.Tracker("memray-bench_ssh2-python.bin"): start_time = timeit.default_timer() import socket - from ssh2.session import Session + from ssh2 import sftp + from ssh2.session import Session + results_dict["import_time"] = f"{(timeit.default_timer() - start_time) * 1000:.2f} ms" host_info = json.loads(Path("target.json").read_text()) @@ -50,9 +53,7 @@ | sftp.LIBSSH2_SFTP_S_IRGRP | sftp.LIBSSH2_SFTP_S_IROTH ) - FILE_FLAGS = ( - sftp.LIBSSH2_FXF_CREAT | sftp.LIBSSH2_FXF_WRITE | sftp.LIBSSH2_FXF_TRUNC - ) + FILE_FLAGS = sftp.LIBSSH2_FXF_CREAT | sftp.LIBSSH2_FXF_WRITE | sftp.LIBSSH2_FXF_TRUNC data = Path("1kb.txt").read_bytes() sftp_conn = session.sftp_init() with sftp_conn.open("/root/1kb.txt", FILE_FLAGS, SFTP_MODE) as f: @@ -61,7 +62,7 @@ temp_time = timeit.default_timer() with sftp_conn.open("/root/1kb.txt", sftp.LIBSSH2_FXF_READ, sftp.LIBSSH2_SFTP_S_IRUSR) as f: - read_data= b"" + read_data = b"" for _rc, data in f: 
read_data += data Path("small.txt").write_bytes(read_data) @@ -77,7 +78,7 @@ temp_time = timeit.default_timer() with sftp_conn.open("/root/14kb.txt", sftp.LIBSSH2_FXF_READ, sftp.LIBSSH2_SFTP_S_IRUSR) as f: - read_data= b"" + read_data = b"" for _rc, data in f: read_data += data Path("medium.txt").write_bytes(read_data) @@ -93,7 +94,7 @@ temp_time = timeit.default_timer() with sftp_conn.open("/root/64kb.txt", sftp.LIBSSH2_FXF_READ, sftp.LIBSSH2_SFTP_S_IRUSR) as f: - read_data= b"" + read_data = b"" for _rc, data in f: read_data += data Path("large.txt").write_bytes(read_data) @@ -105,7 +106,7 @@ pprint(results_dict, sort_dicts=False) if Path("bench_results.json").exists(): - results = json.loads(Path("bench_results.json").read_text()) + results = json.loads(Path("bench_results.json").read_text()) else: results = {} results.update({"ssh2-python": results_dict}) diff --git a/benchmarks/run_benchmarks.py b/benchmarks/run_benchmarks.py index f1f8d51..8a7fa7c 100644 --- a/benchmarks/run_benchmarks.py +++ b/benchmarks/run_benchmarks.py @@ -1,15 +1,18 @@ -from pathlib import Path import json +from pathlib import Path import subprocess + from rich.console import Console from rich.table import Table + def run_all(): """Find all the python files in this directory starting with bench_ and run them.""" for file in Path(__file__).parent.glob("bench_*.py"): print(f"Running {file}") subprocess.run(["python", file], check=True) + def run_memray_reports(report_dict): """Find all memray reports, run them, then delete them.""" for file in Path(__file__).parent.glob("memray-*.bin"): @@ -20,10 +23,13 @@ def run_memray_reports(report_dict): file.unlink() # load the new json file results = json.loads(json_file.read_text()) - report_dict[lib]["peak_memory"] = f'{results["metadata"]["peak_memory"] / 1024 / 1024:.2f} MB' + report_dict[lib]["peak_memory"] = ( + f'{results["metadata"]["peak_memory"] / 1024 / 1024:.2f} MB' + ) report_dict[lib]["allocations"] = 
str(results["metadata"]["total_allocations"]) json_file.unlink() + + def print_report(report_dict): """Print out the report in a rich table""" report_table = Table(title="Benchmark Report") @@ -35,6 +41,7 @@ def print_report(report_dict): report_table.add_row(lib, *[report_dict[lib][key] for key in report_dict[lib]]) Console().print(report_table) + + if __name__ == "__main__": run_all() report_dict = json.loads(Path("bench_results.json").read_text()) diff --git a/pyproject.toml b/pyproject.toml index be3f875..f22fc13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,13 +34,64 @@ dynamic = ["version"] [project.optional-dependencies] dev = [ - "ruff", - "maturin", - "pytest", "docker", - "pexpect", + "maturin", "patchelf; sys_platform == 'linux'", + "pexpect", + "pre-commit", + "pytest", + "ruff", ] [tool.maturin] features = ["pyo3/extension-module"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = ["-v", "-l", "--color=yes", "--code-highlight=yes"] + +[tool.ruff] +line-length = 99 +target-version = "py311" + +[tool.ruff.lint] +fixable = ["ALL"] +select = [ + "B", # bugbear + "C90", # mccabe + "E", # pycodestyle + "F", # flake8 + "G", # flake8-logging-format + "I", # isort + "PERF", # Perflint rules + "PLC", # pylint + "PLE", # pylint + "PLR", # pylint + "PLW", # pylint + "PTH", # Use pathlib + "PT", # flake8-pytest + "RET", # flake8-return + "RUF", # Ruff-specific rules + "SIM", # flake8-simplify + "UP", # pyupgrade + "W", # pycodestyle +] +ignore = [ + "B019", # lru_cache can lead to memory leaks - acceptable tradeoff + "PT004", # pytest underscore prefix for non-return fixtures + "PT005", # pytest no underscore prefix for return fixtures +] + +[tool.ruff.lint.isort] +force-sort-within-sections = true +known-first-party = [ + "hussh", +] +combine-as-imports = true + +[tool.ruff.lint.flake8-pytest-style] +fixture-parentheses = false +mark-parentheses = false + +[tool.ruff.lint.mccabe] +max-complexity = 20 diff --git a/src/connection.rs 
b/src/connection.rs index e32c681..834ae29 100644 --- a/src/connection.rs +++ b/src/connection.rs @@ -226,7 +226,9 @@ impl Connection { #[pymethods] impl Connection { #[new] - #[pyo3(text_signature = "(host, /, port=22, username='root', password=None, private_key=None, timeout=0)")] + #[pyo3( + text_signature = "(host, /, port=22, username='root', password=None, private_key=None, timeout=0)" + )] fn new( host: String, port: Option, @@ -355,7 +357,7 @@ impl Connection { if bytes_read == 0 { break; } - remote_file.write(&read_buffer[..bytes_read]).unwrap(); + remote_file.write_all(&read_buffer[..bytes_read]).unwrap(); } remote_file.flush().unwrap(); remote_file.send_eof().unwrap(); @@ -421,7 +423,7 @@ impl Connection { if bytes_read == 0 { break; } - remote_file.write(&read_buffer[..bytes_read])?; + remote_file.write_all(&read_buffer[..bytes_read])?; } remote_file.close().unwrap(); Ok(()) @@ -572,7 +574,6 @@ impl InteractiveShell { } } - /// `FileTailer` is a structure that represents a remote file tailer. 
/// /// It maintains an SFTP connection and the path to a remote file, @@ -608,11 +609,7 @@ struct FileTailer { #[pymethods] impl FileTailer { #[new] - fn new( - conn: &Connection, - remote_file: String, - init_pos: Option, - ) -> FileTailer { + fn new(conn: &Connection, remote_file: String, init_pos: Option) -> FileTailer { FileTailer { sftp_conn: conn.session.sftp().unwrap(), remote_file, @@ -624,9 +621,13 @@ impl FileTailer { // Determine the current end of the remote file fn seek_end(&mut self) -> PyResult> { - let len = self.sftp_conn.stat(Path::new(&self.remote_file)).unwrap().size; + let len = self + .sftp_conn + .stat(Path::new(&self.remote_file)) + .unwrap() + .size; self.last_pos = len.unwrap(); - if !self.init_pos.is_some() { + if self.init_pos.is_none() { self.init_pos = len; } Ok(len) @@ -635,13 +636,14 @@ impl FileTailer { // Read the contents of the remote file from a given position fn read(&mut self, from_pos: Option) -> String { let from_pos = from_pos.unwrap_or(self.last_pos); - let mut remote_file = BufReader::new( - self.sftp_conn.open(Path::new(&self.remote_file)).unwrap(), - ); - remote_file.seek(std::io::SeekFrom::Start(from_pos)).unwrap(); + let mut remote_file = + BufReader::new(self.sftp_conn.open(Path::new(&self.remote_file)).unwrap()); + remote_file + .seek(std::io::SeekFrom::Start(from_pos)) + .unwrap(); let mut contents = String::new(); remote_file.read_to_string(&mut contents).unwrap(); - self.last_pos = remote_file.seek(std::io::SeekFrom::Current(0)).unwrap(); + self.last_pos = remote_file.stream_position().unwrap(); contents } @@ -659,4 +661,4 @@ impl FileTailer { self.tailed_contents = Some(self.read(self.init_pos)); Ok(()) } -} \ No newline at end of file +} diff --git a/tests/conftest.py b/tests/conftest.py index 28cd3db..06e2a16 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,9 @@ """Common setup for Hussh tests.""" import os +from pathlib import PurePath import subprocess import time -from pathlib import 
PurePath import docker import pexpect @@ -74,9 +74,7 @@ def setup_agent_auth(): # Start the ssh-agent and get the environment variables output = subprocess.check_output(["ssh-agent", "-s"]) - env = dict( - line.split("=", 1) for line in output.decode().splitlines() if "=" in line - ) + env = dict(line.split("=", 1) for line in output.decode().splitlines() if "=" in line) # Set the SSH_AUTH_SOCK and SSH_AGENT_PID environment variables os.environ["SSH_AUTH_SOCK"] = env["SSH_AUTH_SOCK"] @@ -84,7 +82,9 @@ def setup_agent_auth(): # Add the keys to the ssh-agent # subprocess.run(["ssh-add", str(base_key)], check=True) - result = subprocess.run(["ssh-add", str(base_key)], capture_output=True, text=True) + result = subprocess.run( + ["ssh-add", str(base_key)], capture_output=True, text=True, check=False + ) print(result.stdout) print(result.stderr) # The auth_key is password protected diff --git a/tests/test_connection.py b/tests/test_connection.py index ce48e2a..0f988ad 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,9 +1,10 @@ """Tests for hussh.connection module.""" -import pytest from pathlib import Path -from hussh import Connection, SSHResult +import pytest + +from hussh import Connection, SSHResult TEXT_FILE = Path("tests/data/hp.txt").resolve() IMG_FILE = Path("tests/data/puppy.jpeg").resolve() @@ -167,9 +168,10 @@ def test_remote_copy(conn, run_second_server): def test_tail(conn): """Test that we can tail a file.""" - conn.scp_write_data("hello\nworld\n", "/root/hello.txt") + TEST_STR = "hello\nworld\n" + conn.scp_write_data(TEST_STR, "/root/hello.txt") with conn.tail("/root/hello.txt") as tf: - assert tf.read(0) == "hello\nworld\n" - assert tf.last_pos == 12 + assert tf.read(0) == TEST_STR + assert tf.last_pos == len(TEST_STR) conn.execute("echo goodbye >> /root/hello.txt") assert tf.tailed_contents == "goodbye\n"