Web Crawler #13
name: Web Crawler

on:
  push:
    branches:
      - main
  schedule:
    - cron: '0 * * * *'   # at the top of every hour
  workflow_dispatch:
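  # workflow_dispatch also allows manual runs from the Actions tab or, for example,
  # via the GitHub CLI: `gh workflow run "Web Crawler"`.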

jobs:
  crawl_and_commit:
    runs-on: ubuntu-latest
    permissions:
      contents: write   # needed so the job can push the crawled data back to the repository
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
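      # requirements.txt is assumed to pin whatever crawl.py imports; it is not shown here.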

      - name: Run web crawling script and check for changes
        id: crawl_step
        run: |
          python crawl.py
        # continue-on-error lets the workflow proceed to the commit step even if
        # crawl.py exits non-zero, so any output it did manage to write is not lost.
        continue-on-error: true

      - name: Commit crawled files if there are changes
        if: success()
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"
          # Stage the entire output directory for the commit
          git add crawled_output
          git commit -m "chore: Update crawled data" || echo "No changes to commit."
          # Push the commit; without this the updated data never reaches the remote branch
          git push
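
The workflow assumes a crawl.py at the repository root that writes its results into crawled_output/, the directory the commit step stages. That script is not part of this file; the sketch below is only a minimal, standard-library illustration of that contract, with START_URL and the snapshot.html filename being placeholder assumptions rather than anything taken from the real crawler.

```python
# Hypothetical stand-in for the crawl.py the workflow runs; the real script is not shown here.
import pathlib
import urllib.request

START_URL = "https://example.com/"            # assumed target URL, replace with the real one
OUTPUT_DIR = pathlib.Path("crawled_output")   # directory staged by the workflow's `git add`


def fetch(url: str) -> str:
    """Download a page and return its body as text."""
    with urllib.request.urlopen(url, timeout=30) as resp:
        return resp.read().decode("utf-8", errors="replace")


def main() -> None:
    OUTPUT_DIR.mkdir(exist_ok=True)
    # Writing to a fixed filename means reruns only create a git diff
    # when the fetched content actually changes.
    (OUTPUT_DIR / "snapshot.html").write_text(fetch(START_URL), encoding="utf-8")


if __name__ == "__main__":
    main()
```

Whatever the real crawl.py does, the only contract the workflow relies on is that new or changed files end up under crawled_output/ before the commit step runs.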