Skip to content

Scrape

Scrape #66

Workflow file for this run

name: Scrape

on:
  schedule:
    # Daily at 00:00 UTC.
    - cron: "0 0 * * *"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

env:
  # Quoted so the value is the string "true", not a YAML boolean.
  # NOTE(review): GitHub removed support for unsecure workflow commands
  # (::set-env / ::add-path) in November 2020, so this flag is likely a
  # no-op today — confirm use_graphql_scraper.py no longer relies on it,
  # then delete this env block.
  ACTIONS_ALLOW_UNSECURE_COMMANDS: "true"

jobs:
  scrape-latest:
    runs-on: windows-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install requirements
        run: pip install -r requirements.txt

      # One explicit step per month: keeps per-month pass/fail visible in
      # the run UI and stops the job at the first failing month.
      - name: Run Scraper For January
        run: python use_graphql_scraper.py --month=1
      - name: Run Scraper For February
        run: python use_graphql_scraper.py --month=2
      - name: Run Scraper For March
        run: python use_graphql_scraper.py --month=3
      - name: Run Scraper For April
        run: python use_graphql_scraper.py --month=4
      - name: Run Scraper For May
        run: python use_graphql_scraper.py --month=5
      - name: Run Scraper For June
        run: python use_graphql_scraper.py --month=6
      - name: Run Scraper For July
        run: python use_graphql_scraper.py --month=7
      - name: Run Scraper For August
        run: python use_graphql_scraper.py --month=8
      - name: Run Scraper For September
        run: python use_graphql_scraper.py --month=9
      - name: Run Scraper For October
        run: python use_graphql_scraper.py --month=10
      - name: Run Scraper For November
        run: python use_graphql_scraper.py --month=11
      - name: Run Scraper For December
        run: python use_graphql_scraper.py --month=12

      # Authenticate to Google Cloud with the service-account key stored
      # in the GCP_CREDENTIALS repository secret.
      - id: 'auth'
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GCP_CREDENTIALS }}'

      # Upload only the CSVs the scraper wrote (glob filters within path)
      # to the weekly_osaka_hotel_data bucket.
      - id: 'upload-file'
        uses: 'google-github-actions/upload-cloud-storage@v2'
        with:
          path: './'
          destination: 'weekly_osaka_hotel_data'
          glob: 'scraped_hotel_data_csv/*.csv'