Add readthedocs configuration #35

Workflow file for this run

name: CI
on:
  push:
    branches:
      - master
    paths-ignore:
      - '**.md'
  pull_request:
    branches:
      - master
    paths-ignore:
      - '**.md'
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          # - '3.6' (see https://github.com/actions/setup-python/issues/544)
          - '3.7'
          - '3.8'
          - '3.9'
          # - '3.10' 3.10+ require moving off of nose (https://github.com/nose-devs/nose/issues/1099)
          # - '3.11'
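          # A possible route to 3.10+ (an untested sketch, not part of this run):
          # swap the nose runner for pytest, roughly
          #   Install: pip install .[avro] mock pytest pytest-cov
          #   Test:    HDFSCLI_TEST_URL="$WEBHDFS_URL" pytest --cov=hdfs
          # pytest collects most nose-style tests, though nose-specific plugins
          # or yield-based tests would need porting.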
    steps:
      - name: Check out
        uses: actions/checkout@v3
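      # Java is required for the Hadoop daemons launched below. Note that the
      # 'adopt' distribution (AdoptOpenJDK) is deprecated in setup-java in
      # favor of 'temurin'.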
      - name: Setup Java
        uses: actions/setup-java@v3
        with:
          distribution: 'adopt'
          java-version: '8'
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Download Hadoop
        run: |
          echo "HADOOP_HOME=$(./scripts/hadoop.sh download)" >>"$GITHUB_ENV"
      - name: Configure Hadoop
        run: |
          echo "HADOOP_CONF_DIR=$(./scripts/hadoop.sh config)" >>"$GITHUB_ENV"
      - name: Start HDFS
        run: |
          ./scripts/hadoop.sh start
          echo "WEBHDFS_URL=http://$("$HADOOP_HOME/bin/hdfs" getconf -confKey dfs.namenode.http-address)" >>"$GITHUB_ENV"
          echo "HTTPFS_URL=http://localhost:14000" >>"$GITHUB_ENV"
          sleep 5 # TODO: Find a better way to wait for all datanodes to become reachable.
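          # One sturdier alternative (an untested sketch): poll the namenode
          # until at least one datanode has registered, e.g.
          #   until "$HADOOP_HOME/bin/hdfs" dfsadmin -report 2>/dev/null | grep -q 'Live datanodes ([1-9]'; do
          #     sleep 1
          #   done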
      - name: Install
        run: pip install .[avro] coverage mock nose
      - name: Test on WebHDFS
        run: HDFSCLI_TEST_URL="$WEBHDFS_URL" python -m nose --with-coverage --cover-package=hdfs
      - name: Test on HTTPFS
        run: HDFSCLI_TEST_URL="$HTTPFS_URL" HDFSCLI_NOSNAPSHOT=1 python -m nose --with-coverage --cover-package=hdfs
      - name: Stop HDFS
        if: always()
        run: ./scripts/hadoop.sh stop
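
For context, the change under review adds a Read the Docs configuration rather than modifying this workflow. A minimal .readthedocs.yaml for a Sphinx project like this one might look as follows; this is a sketch only, and the build image, Python version, and doc path are assumptions rather than the contents of the actual commit:

version: 2
build:
  os: ubuntu-22.04  # assumed build image
  tools:
    python: '3.9'   # assumed; matches the newest version tested above
sphinx:
  configuration: doc/conf.py  # assumed location of the Sphinx config
python:
  install:
    - method: pip
      path: .
      extra_requirements:
        - avro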