Skip to content

Commit

Permalink
Reverted scrapers to run without main
Browse files — browse the repository at this point in the history
  • Loading branch information
JoshD94 committed Nov 21, 2024
1 parent 7f287f3 commit 9f92efe
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 45 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@ jobs:
sudo systemctl restart nginx
docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
sleep 60s
docker exec $(docker ps -q -f name=the-stack_web) psql $DB_URL -c "DELETE FROM alembic_version;"
docker exec $(docker ps -q -f name=the-stack_web) flask db stamp head
docker exec $(docker ps -q -f name=the-stack_web) flask db current
attempt=1
max_attempts=5
until docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade || [ $attempt -eq $max_attempts ]
Expand Down
87 changes: 43 additions & 44 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,15 @@
from graphql.utils import schema_printer
from src.database import db_session, init_db
from src.database import Base as db
from src.scrapers.capacities_scraper import fetch_capacities
from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
from src.scrapers.scraper_helpers import clean_past_hours
from src.scrapers.sp_hours_scraper import fetch_sp_facility
from src.scrapers.equipment_scraper import scrape_equipment
from src.scrapers.class_scraper import fetch_classes
from src.scrapers.activities_scraper import fetch_activity
from src.utils.utils import create_gym_table
from src.models.openhours import OpenHours
from flask_migrate import Migrate
from src.schema import Query, Mutation
from src.database import db_url, db_user, db_password, db_name, db_host, db_port
Expand Down Expand Up @@ -61,61 +70,51 @@ def shutdown_session(exception=None):
db_session.remove()


# Create schema.graphql
with open("schema.graphql", "w+") as schema_file:
schema_file.write(schema_printer.print_schema(schema))
schema_file.close()
# Scrape hours every 15 minutes

if __name__ == "__main__":
print("Starting app...")
from src.scrapers.capacities_scraper import fetch_capacities
from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
from src.scrapers.scraper_helpers import clean_past_hours
from src.scrapers.sp_hours_scraper import fetch_sp_facility
from src.scrapers.equipment_scraper import scrape_equipment
from src.scrapers.class_scraper import fetch_classes
from src.scrapers.activities_scraper import fetch_activity
from src.utils.utils import create_gym_table
from src.models.openhours import OpenHours
# Scrape hours every 15 minutes
@scheduler.task("interval", id="scrape_hours", seconds=900)
def scrape_hours():
logging.info("Scraping hours from sheets...")

@scheduler.task("interval", id="scrape_hours", seconds=900)
def scrape_hours():
logging.info("Scraping hours from sheets...")
# Clear hours
db_session.query(OpenHours).delete()

# Clear hours
db_session.query(OpenHours).delete()
fetch_reg_facility()
fetch_reg_building()
fetch_sp_facility()
clean_past_hours()

fetch_reg_facility()
fetch_reg_building()
fetch_sp_facility()
clean_past_hours()
# Scrape capacities every 10 minutes

# Scrape capacities every 10 minutes
@scheduler.task("interval", id="scrape_capacities", seconds=600)
def scrape_capacities():
logging.info("Scraping capacities from C2C...")

@scheduler.task("interval", id="scrape_capacities", seconds=600)
def scrape_capacities():
logging.info("Scraping capacities from C2C...")
fetch_capacities()

fetch_capacities()
# Scrape classes every hour

# Scrape classes every hour
@scheduler.task("interval", id="scrape_classes", seconds=3600)
def scrape_classes():
logging.info("Scraping classes from group-fitness-classes...")

@scheduler.task("interval", id="scrape_classes", seconds=3600)
def scrape_classes():
logging.info("Scraping classes from group-fitness-classes...")
fetch_classes(10)

fetch_classes(10)
# Create database and fill it with data
init_db()
create_gym_table()

# Create database and fill it with data
init_db()
create_gym_table()
scrape_classes()
scrape_hours()
scrape_capacities()
scrape_equipment()
logging.info("Scraping activities from sheets...")
fetch_activity()

scrape_classes()
scrape_hours()
scrape_capacities()
scrape_equipment()
logging.info("Scraping activities from sheets...")
fetch_activity()
# Create schema.graphql
with open("schema.graphql", "w+") as schema_file:
schema_file.write(schema_printer.print_schema(schema))
schema_file.close()

if __name__ == "__main__":
app.run(host="127.0.0.1", port=5000)

0 comments on commit 9f92efe

Please sign in to comment.