sw-jobs-api/scrapers/Dockerfile

# Use an official Python runtime as a parent image
FROM python:3.9-slim
# Set the working directory in the container
WORKDIR /app
# Copy the current directory contents into the container at /app
COPY . /app
# Ensure cron is installed
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt
# Copy the crontab file to the cron.d directory
COPY crontab /etc/cron.d/scraper-cron
# Set the permissions cron requires on the job file (readable by all, writable only by root)
RUN chmod 0644 /etc/cron.d/scraper-cron
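# Note: files in /etc/cron.d must include a user field and end with a newline.
# A minimal sketch of what the crontab file might contain (the schedule below is
# an assumption, not taken from the repository):
#   0 */6 * * * root /usr/local/bin/run_scraper.sh >> /var/log/cron.log 2>&1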
# Create the log file to be able to run tail
RUN touch /var/log/cron.log
# Copy the script to run the scraper
COPY run_scraper.sh /usr/local/bin/run_scraper.sh
# Grant execution rights to the script
RUN chmod +x /usr/local/bin/run_scraper.sh
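# A minimal sketch of what run_scraper.sh could look like (the entry point
# scraper.py is a placeholder assumption):
#   #!/bin/sh
#   cd /app && python scraper.py >> /var/log/cron.log 2>&1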
# Run the command on container startup
CMD ["cron", "-f"]
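# Example of building and running this image in the foreground cron mode above
# (the image tag is arbitrary):
#   docker build -t scrapers-cron .
#   docker run -d --name scrapers scrapers-cron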