diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index a159ac8..da074eb 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -39,11 +39,11 @@ jobs:
           touch tags
           source tags
           export IMAGE_TAG=${{ steps.vars.outputs.sha_short }}
-          echo "export IMAGE_TAG=${IMAGE_TAG}" > tags
           cd docker-compose
           docker stack rm the-stack
           sleep 20s
           sudo systemctl stop nginx
          sudo systemctl restart nginx
           docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
-          yes | docker system prune -a
\ No newline at end of file
+          sleep 60s
+          yes | docker system prune -a
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
deleted file mode 100644
index 8920685..0000000
--- a/.github/workflows/test.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: test
-
-on:
-  push:
-    branches:
-      - master
-      - release/*
-  pull_request:
-    branches:
-      - master
-      - release/*
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    services:
-      postgres:
-        image: postgres:latest
-        env:
-          POSTGRES_DB: uplift
-          POSTGRES_PASSWORD: password
-          POSTGRES_USER: local
-        ports:
-          - 5432:5432
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-
-    steps:
-      - uses: actions/checkout@v1
-      - uses: actions/setup-python@v1
-        with:
-          python-version: 3.9
-      - name: install and test
-        run: |
-          sudo apt-get update
-          sudo apt-get install libxml2-dev libxslt-dev
-          sudo apt-get install --yes --no-install-recommends postgresql-client
-          pip install --force-reinstall pip==20.0.2
-          pip install --force-reinstall setuptools==44.0.0
-          pip freeze
-          pip install -r requirements.txt
-          python -m unittest src.tests.test_scraper
-        env:
-          DB_HOST: localhost
-          DB_NAME: uplift
-          DB_PORT: 5432
-          DB_PASSWORD: password
-          DB_USERNAME: local
-          FLASK_ENV: dev
-          GOOGLE_SERVICE_ACCOUNT_PATH: service-account-key.json
diff --git a/.gitignore b/.gitignore
index 8ff10ce..e78072a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,7 +12,6 @@ build/
 *.DS_Store
 .env
 .envrc
-migrations
 .vscode
 Archive
 scripts
diff --git a/Dockerfile b/Dockerfile
index ec3e3b0..30de6de 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,4 +6,4 @@ COPY . .
 ENV MAX_CONCURRENT_PIP=4
 RUN pip3 install --upgrade pip
 RUN pip3 install --exists-action w -r requirements.txt
-CMD python3 app.py
+CMD flask --app migrations db upgrade && python3 app.py
\ No newline at end of file
diff --git a/app.db b/app.db
new file mode 100644
index 0000000..d13a36b
Binary files /dev/null and b/app.db differ
diff --git a/app.py b/app.py
index 6fb7b04..70a7c30 100644
--- a/app.py
+++ b/app.py
@@ -1,107 +1,17 @@
-import logging
+from app_factory import create_app
 import sentry_sdk
-from flask import Flask, render_template
-from flask_apscheduler import APScheduler
-from flask_graphql import GraphQLView
-from graphene import Schema
-from graphql.utils import schema_printer
-from src.database import db_session, init_db
-from src.schema import Query, Mutation
-from src.scrapers.capacities_scraper import fetch_capacities
-from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
-from src.scrapers.scraper_helpers import clean_past_hours
-from src.scrapers.sp_hours_scraper import fetch_sp_facility
-from src.scrapers.equipment_scraper import scrape_equipment
-from src.scrapers.class_scraper import fetch_classes
-from src.scrapers.activities_scraper import fetch_activity
-from src.utils.utils import create_gym_table
-from src.models.openhours import OpenHours
-from flasgger import Swagger
+import os
+
+# Initialize Sentry only if not running locally
+if os.environ.get('FLASK_ENV') in ["development", "production"]:
+    sentry_sdk.init(
+        dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
+        traces_sample_rate=1.0,
+        profiles_sample_rate=1.0,
+    )
 
-sentry_sdk.init(
-    dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
-    # Set traces_sample_rate to 1.0 to capture 100%
-    # of transactions for tracing.
-    traces_sample_rate=1.0,
-    # Set profiles_sample_rate to 1.0 to profile 100%
-    # of sampled transactions.
-    # We recommend adjusting this value in production.
-    profiles_sample_rate=1.0,
-)
-
-app = Flask(__name__)
-app.debug = True
-schema = Schema(query=Query, mutation=Mutation)
-swagger = Swagger(app)
-
-# Scheduler
-scheduler = APScheduler()
-scheduler.init_app(app)
-scheduler.start()
-
-# Logging
-logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S")
-
-
-@app.route("/")
-def index():
-    return render_template("index.html")
-
-
-app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True))
-
-
-@app.teardown_appcontext
-def shutdown_session(exception=None):
-    db_session.remove()
-
-
-# Scrape hours every 15 minutes
-@scheduler.task("interval", id="scrape_hours", seconds=900)
-def scrape_hours():
-    logging.info("Scraping hours from sheets...")
-
-    # Clear hours
-    db_session.query(OpenHours).delete()
-
-    fetch_reg_facility()
-    fetch_reg_building()
-    fetch_sp_facility()
-    clean_past_hours()
-
-
-# Scrape capacities every 10 minutes
-@scheduler.task("interval", id="scrape_capacities", seconds=600)
-def scrape_capacities():
-    logging.info("Scraping capacities from C2C...")
-
-    fetch_capacities()
-
-
-# Scrape classes every hour
-@scheduler.task("interval", id="scrape_classes", seconds=3600)
-def scrape_classes():
-    logging.info("Scraping classes from group-fitness-classes...")
-
-    fetch_classes(10)
-
-
-# Create database and fill it with data
-init_db()
-create_gym_table()
-scrape_classes()
-scrape_hours()
-scrape_capacities()
-scrape_equipment()
-
-logging.info("Scraping activities from sheets...")
-fetch_activity()
-
-# Create schema.graphql
-with open("schema.graphql", "w+") as schema_file:
-    schema_file.write(schema_printer.print_schema(schema))
-    schema_file.close()
+# Create Flask app with scrapers enabled
+app = create_app(run_migrations=False)
 
 if __name__ == "__main__":
-    app.run(host="127.0.0.1", port=5000)
+    app.run(host="0.0.0.0", port=5000)
diff --git a/app_factory.py b/app_factory.py
new file mode 100644
index 0000000..9754b75
--- /dev/null
+++ b/app_factory.py
@@ -0,0 +1,235 @@
+import logging
+from datetime import timedelta, timezone
+from flask_jwt_extended import JWTManager
+from datetime import datetime
+from flask import Flask, render_template
+from graphene import Schema
+from graphql.utils import schema_printer
+from src.utils.constants import JWT_SECRET_KEY
+from src.database import db_session, init_db
+from src.database import Base as db
+from src.database import db_url, db_user, db_password, db_name, db_host, db_port
+from flask_migrate import Migrate
+from src.schema import Query, Mutation
+from flasgger import Swagger
+from flask_graphql import GraphQLView
+from src.models.token_blacklist import TokenBlocklist
+
+
+# Set up logging at module level
+logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S")
+logger = logging.getLogger(__name__)
+
+
+def create_app(run_migrations=False):
+    """
+    Application factory for the Flask app.
+
+    Args:
+        run_migrations: If True, configure the app for migrations only (no scrapers)
+
+    Returns:
+        Configured Flask application
+    """
+    logger.info("Initializing application")
+
+    # Create and configure Flask app
+    app = Flask(__name__)
+    app.debug = True
+    logger.info("Flask app created with debug=%s", app.debug)
+
+    # Verify all required database variables are present
+    if not all([db_user, db_password, db_name, db_host, db_port]):
+        logger.error("Missing required database configuration variables")
+        raise ValueError(
+            "Missing required database configuration. "
+            "Please ensure all database environment variables are set."
+        )
+
+    # Configure database
+    logger.info("Configuring database connection to %s:%s/%s", db_host, db_port, db_name)
+    app.config["SQLALCHEMY_DATABASE_URI"] = db_url
+    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
+
+    # Set up extensions
+    logger.info("Setting up Flask extensions")
+    migrate = Migrate(app, db)
+    schema = Schema(query=Query, mutation=Mutation)
+    swagger = Swagger(app)
+
+    app.config["JWT_SECRET_KEY"] = JWT_SECRET_KEY
+    app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(hours=1)
+    app.config["JWT_REFRESH_TOKEN_EXPIRES"] = timedelta(days=30)
+
+    jwt = JWTManager(app)
+
+    @jwt.token_in_blocklist_loader
+    def check_if_token_revoked(jwt_header, jwt_payload: dict) -> bool:
+        jti = jwt_payload["jti"]
+        return db_session.query(TokenBlocklist.id).filter_by(jti=jti).scalar() is not None
+
+    # Configure routes
+    logger.info("Configuring routes")
+
+    @app.route("/")
+    def index():
+        return render_template("index.html")
+
+    app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True))
+
+    @app.teardown_appcontext
+    def shutdown_session(exception=None):
+        db_session.remove()
+
+    # Initialize database
+    logger.info("Initializing database")
+    init_db()
+
+    # Create schema.graphql
+    logger.info("Generating GraphQL schema file")
+    with open("schema.graphql", "w+") as schema_file:
+        schema_file.write(schema_printer.print_schema(schema))
+        schema_file.close()
+
+    # Configure and run scrapers if not in migration mode
+    if not run_migrations:
+        logger.info("Setting up scrapers and scheduled tasks")
+        setup_scrapers(app)
+    else:
+        logger.info("Running in migration mode - scrapers disabled")
+
+    logger.info("Application initialization complete")
+    return app
+
+
+def setup_scrapers(app):
+    """Set up scrapers and scheduled tasks"""
+    # Import scraper-related modules only when needed
+    from flask_apscheduler import APScheduler
+    from src.scrapers.capacities_scraper import fetch_capacities, update_hourly_capacity
+    from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
+    from src.scrapers.scraper_helpers import clean_past_hours
+    from src.scrapers.sp_hours_scraper import fetch_sp_facility
+    from src.scrapers.equipment_scraper import scrape_equipment
+    from src.scrapers.class_scraper import fetch_classes
+    from src.scrapers.activities_scraper import fetch_activity
+    from src.utils.utils import create_gym_table
+    from src.models.openhours import OpenHours
+    import os
+
+    logger = logging.getLogger(__name__)
+    logger.info("Beginning scraper configuration")
+
+    # Initialize scheduler
+    scheduler = APScheduler()
+    logger.info("APScheduler initialized")
+
+    # Scrape hours every 15 minutes
+    @scheduler.task("interval", id="scrape_hours", seconds=900)
+    def scrape_hours():
+        job = scheduler.get_job("scrape_hours")
+        next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown"
+        logging.info('Running job "scrape_hours (trigger: interval[0:15:00], next run at: %s EST)"', next_run)
+        try:
+            logging.info("Scraping hours from sheets...")
+            # Clear hours
+            db_session.query(OpenHours).delete()
+            fetch_reg_facility()
+            fetch_reg_building()
+            fetch_sp_facility()
+            clean_past_hours()
+            logging.info(
+                'Job "scrape_hours (trigger: interval[0:15:00], next run at: %s EST)" executed successfully', next_run
+            )
+        except Exception as e:
+            logging.error(f"Error in scrape_hours: {e}")
+
+    # Scrape capacities every 10 minutes
+    @scheduler.task("interval", id="scrape_capacities", seconds=600)
+    def scrape_capacities():
+        job = scheduler.get_job("scrape_capacities")
+        next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown"
+        logging.info('Running job "scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)"', next_run)
+        try:
+            logging.info("Scraping capacities from C2C...")
+            fetch_capacities()
+            logging.info(
+                'Job "scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)" executed successfully',
+                next_run,
+            )
+        except Exception as e:
+            logging.error(f"Error in scrape_capacities: {e}")
+
+    # Scrape classes every hour
+    @scheduler.task("interval", id="scrape_classes", seconds=3600)
+    def scrape_classes():
+        job = scheduler.get_job("scrape_classes")
+        next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown"
+        logging.info('Running job "scrape_classes (trigger: interval[1:00:00], next run at: %s EST)"', next_run)
+        try:
+            logging.info("Scraping classes from group-fitness-classes...")
+            fetch_classes(10)
+            logging.info(
+                'Job "scrape_classes (trigger: interval[1:00:00], next run at: %s EST)" executed successfully', next_run
+            )
+        except Exception as e:
+            logging.error(f"Error in scrape_classes: {e}")
+
+    # Delete expired blocklisted tokens once a day
+    @scheduler.task("interval", id="cleanup_expired_tokens", hours=24)
+    def cleanup_expired_tokens():
+        logger.info("Deleting expired tokens...")
+        now = datetime.now(timezone.utc)
+        db_session.query(TokenBlocklist).filter(TokenBlocklist.expires_at < now).delete()
+        db_session.commit()
+
+    # Update hourly average capacity every hour
+    @scheduler.task("cron", id="update_capacity", hour="*")
+    def scheduled_job():
+        current_time = datetime.now()
+        current_day = current_time.strftime("%A").upper()
+        current_hour = current_time.hour
+        try:
+            logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...")
+            update_hourly_capacity(current_day, current_hour)
+        except Exception as e:
+            logging.error(f"Error updating hourly average capacity for {current_day}, hour {current_hour}: {e}")
+
+    # Job execution logging is handled within each task function
+
+    # Start scheduler
+    logger.info("Starting scheduler")
+    scheduler.init_app(app)
+    scheduler.start()
+
+    # Run initial scraping
+    logger.info("Running initial scraping...")
+    try:
+        create_gym_table()
+        logger.info("Gym table created")
+
+        logger.info("Scraping classes from group-fitness-classes...")
+        fetch_classes(10)
+        logger.info("Initial class scraping complete")
+
+        logger.info("Scraping hours from sheets...")
+        db_session.query(OpenHours).delete()
+        fetch_reg_facility()
+        fetch_reg_building()
+        fetch_sp_facility()
+        clean_past_hours()
+        logger.info("Initial hours scraping complete")
+
+        logger.info("Scraping capacities from C2C...")
+        fetch_capacities()
+        logger.info("Initial capacities scraping complete")
+
+        logger.info("Scraping equipment...")
+        scrape_equipment()
+        logger.info("Initial equipment scraping complete")
+
+        logger.info("Scraping activities from sheets...")
+        fetch_activity()
+        logger.info("Initial activities scraping complete")
+
+        logger.info("All initial scraping completed successfully")
+    except Exception as e:
+        logger.error(f"Error during initial scraping: {e}")
diff --git a/manager.py b/manager.py
deleted file mode 100644
index c38d51d..0000000
--- a/manager.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from flask_script import Manager
-from flask_migrate import MigrateCommand
-from app import app  # , db
-
-# Build manager
-# migrate = Migrate(app, db)
-manager = Manager(app)
-manager.add_command("db", MigrateCommand)
-
-if __name__ == "__main__":
-    manager.run()
diff --git a/migrations.py b/migrations.py
new file mode 100644
index 0000000..5bf1f0a
--- /dev/null
+++ b/migrations.py
@@ -0,0 +1,4 @@
+from app_factory import create_app
+
+# Create Flask app for migrations only (no scrapers)
+app = create_app(run_migrations=True)
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 0000000..d676023
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,46 @@
+# A generic, single database configuration.
+
+[alembic]
+script_location = migrations
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..e4f27ce
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,98 @@
+from __future__ import with_statement
+import sys
+import os
+sys.path.append(os.path.abspath(os.path.dirname(__file__) + "/.."))
+from app import app
+
+import logging
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+with app.app_context():
+    config.set_main_option('sqlalchemy.url', app.config['SQLALCHEMY_DATABASE_URI'])
+    target_metadata = app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix='sqlalchemy.',
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            process_revision_directives=process_revision_directives,
+            **app.extensions['migrate'].configure_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
\ No newline at end of file
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/.gitkeep b/migrations/versions/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py b/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py
new file mode 100644
index 0000000..93b9235
--- /dev/null
+++ b/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py
@@ -0,0 +1,30 @@
+"""Added fitness center to workout model
+
+Revision ID: 0fde4435424e
+Revises: 6b01a81bb92b
+Create Date: 2025-03-06 20:50:25.488572
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '0fde4435424e'
+down_revision = '6b01a81bb92b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('workout', sa.Column('facility_id', sa.Integer(), nullable=False))
+    op.create_foreign_key(None, 'workout', 'facility', ['facility_id'], ['id'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'workout', type_='foreignkey')
+    op.drop_column('workout', 'facility_id')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/31b1fa20772f_popular_times.py b/migrations/versions/31b1fa20772f_popular_times.py
new file mode 100644
index 0000000..6f50ab8
--- /dev/null
+++ b/migrations/versions/31b1fa20772f_popular_times.py
@@ -0,0 +1,56 @@
+"""popular times
+
+Revision ID: 31b1fa20772f
+Revises:
+Create Date: 2025-02-28 20:57:19.922403
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
+revision = '31b1fa20772f'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+# Declared with create_type=False so Alembic does not try to create the enum type itself
+day_of_week_enum = postgresql.ENUM(
+    'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday',
+    name='dayofweekenum', create_type=False
+)
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('gear', 'cost',
+                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
+                    nullable=False)
+
+    # Guard: this migration may run more than once, so only create the table if it does not already exist
+    op.execute("""
+        DO $$
+        BEGIN
+            IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'hourly_average_capacity') THEN
+                CREATE TABLE hourly_average_capacity (
+                    id SERIAL PRIMARY KEY,
+                    facility_id INTEGER NOT NULL REFERENCES facility(id),
+                    average_percent FLOAT NOT NULL,
+                    hour_of_day INTEGER NOT NULL,
+                    day_of_week dayofweekenum,
+                    history NUMERIC[] DEFAULT '{}' NOT NULL
+                );
+            END IF;
+        END $$;
+    """)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('hourly_average_capacity')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py
new file mode 100644
index 0000000..81d9121
--- /dev/null
+++ b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py
@@ -0,0 +1,30 @@
+"""Added active_streak and max_streak to users
+
+Revision ID: 6b01a81bb92b
+Revises: 31b1fa20772f
+Create Date: 2025-03-04 22:45:06.601964
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '6b01a81bb92b'
+down_revision = '31b1fa20772f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('active_streak', sa.Integer(), nullable=True))
+    op.add_column('users', sa.Column('max_streak', sa.Integer(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'active_streak')
+    op.drop_column('users', 'max_streak')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/7245f58bb00a_add_token_blacklist_table.py b/migrations/versions/7245f58bb00a_add_token_blacklist_table.py
new file mode 100644
index 0000000..3607a6e
--- /dev/null
+++ b/migrations/versions/7245f58bb00a_add_token_blacklist_table.py
@@ -0,0 +1,43 @@
+"""add token_blacklist table
+
+Revision ID: 7245f58bb00a
+Revises: 0fde4435424e
+Create Date: 2025-03-12 17:46:57.085233
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '7245f58bb00a'
+down_revision = '0fde4435424e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Create the token_blacklist table, with an index on jti for faster lookups,
+    # only if the table does not already exist
+    op.execute("""
+        DO $$
+        BEGIN
+            IF NOT EXISTS (
+                SELECT 1
+                FROM information_schema.tables
+                WHERE table_name = 'token_blacklist'
+            ) THEN
+                CREATE TABLE token_blacklist (
+                    id SERIAL PRIMARY KEY,
+                    jti VARCHAR(36) NOT NULL,
+                    expires_at TIMESTAMP NOT NULL
+                );
+                CREATE INDEX ix_token_blacklist_jti ON token_blacklist(jti);
+            END IF;
+        END $$;
+    """)
+
+
+def downgrade():
+    # Drop the table (its index is dropped along with it)
+    op.execute("DROP TABLE IF EXISTS token_blacklist;")
diff --git a/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py b/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py
new file mode 100644
index 0000000..6ce6601
--- /dev/null
+++ b/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py
@@ -0,0 +1,28 @@
+"""added encoded_image column to user
+
+Revision ID: add99ce06ff5
+Revises: 7245f58bb00a
+Create Date: 2025-03-12 18:17:26.681109
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'add99ce06ff5'
+down_revision = '7245f58bb00a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('encoded_image', sa.String(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'encoded_image')
+    # ### end Alembic commands ###
diff --git a/requirements.txt b/requirements.txt
index 87f0460..3d4cc15 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -32,7 +32,6 @@ graphql-relay==0.4.5
 graphql-server-core==1.1.1
 greenlet==2.0.2
 gspread==5.12.3
-gunicorn==19.9.0
 identify==2.5.24
 idna==2.6
 importlib-metadata==6.7.0
@@ -79,4 +78,5 @@ wasmer-compiler-cranelift==1.1.0
 wcwidth==0.2.6
 Werkzeug==2.2.2
 zipp==3.15.0
-sentry-sdk==2.13.0
\ No newline at end of file
+sentry-sdk==2.13.0
+flask_jwt_extended==4.7.1
\ No newline at end of file
diff --git a/schema.graphql b/schema.graphql
index e44663b..a4aeaea 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -65,21 +65,13 @@ enum CourtType {
   BADMINTON
 }
 
-type CreateGiveaway {
-  giveaway: Giveaway
-}
-
 type CreateReport {
   report: Report
 }
 
-type CreateUser {
-  user: User
-}
-
 scalar DateTime
 
-enum DayOfWeekEnum {
+enum DayOfWeekGraphQLEnum {
   MONDAY
   TUESDAY
   WEDNESDAY
@@ -92,22 +84,13 @@
 type Equipment {
   id: ID!
   name: String!
-  equipmentType: EquipmentType!
+  muscleGroups: [MuscleGroup]!
+  cleanName: String!
   facilityId: Int!
   quantity: Int
   accessibility: AccessibilityType
 }
 
-enum EquipmentType {
-  CARDIO
-  RACKS_AND_BENCHES
-  SELECTORIZED
-  MULTI_CABLE
-  FREE_WEIGHTS
-  MISCELLANEOUS
-  PLATE_LOADED
-}
-
 type Facility {
   id: ID!
   facilityType: FacilityType!
@@ -154,12 +137,53 @@
   reports: [Report]
 }
 
+type HourlyAverageCapacity {
+  id: ID!
+  facilityId: Int!
+  averagePercent: Float!
+  hourOfDay: Int!
+  dayOfWeek: DayOfWeekGraphQLEnum
+  history: [Float]!
+}
+
+scalar JSONString
+
+type LoginUser {
+  accessToken: String
+  refreshToken: String
+}
+
+type LogoutUser {
+  success: Boolean
+}
+
+enum MuscleGroup {
+  ABDOMINALS
+  CHEST
+  BACK
+  SHOULDERS
+  BICEPS
+  TRICEPS
+  HAMSTRINGS
+  QUADS
+  GLUTES
+  CALVES
+  MISCELLANEOUS
+  CARDIO
+}
+
 type Mutation {
   createGiveaway(name: String!): Giveaway
-  createUser(email: String!, name: String!, netId: String!): User
+  createUser(email: String!, encodedImage: String, name: String!, netId: String!): User
+  editUser(email: String, encodedImage: String, name: String, netId: String!): User
   enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance
   setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User
-  logWorkout(userId: Int!, workoutTime: DateTime!): Workout
+  logWorkout(facilityId: Int!, userId: Int!, workoutTime: DateTime!): Workout
+  loginUser(netId: String!): LoginUser
+  logoutUser: LogoutUser
+  refreshAccessToken: RefreshAccessToken
+  createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport
+  deleteUser(userId: Int!): User
 }
 
 type OpenHours {
@@ -190,10 +214,19 @@
 type Query {
   getAllGyms: [Gym]
+  getUserByNetId(netId: String): [User]
   getUsersByGiveawayId(id: Int): [User]
   getWeeklyWorkoutDays(id: Int): [String]
   getWorkoutsById(id: Int): [Workout]
   activities: [Activity]
+  getAllReports: [Report]
+  getWorkoutGoals(id: Int!): [String]
+  getUserStreak(id: Int!): JSONString
+  getHourlyAverageCapacitiesByFacilityId(facilityId: Int): [HourlyAverageCapacity]
+}
+
+type RefreshAccessToken {
+  newAccessToken: String
 }
 
 type Report {
@@ -202,9 +235,7 @@
   description: String!
   gymId: Int!
   issue: ReportType!
-  userId: Int!
   gym: Gym
-  user: User
 }
 
 enum ReportType {
@@ -217,16 +248,19 @@
 
 type User {
   id: ID!
-  email: String!
+  email: String
   netId: String!
   name: String!
-  workoutGoal: [DayOfWeekEnum]
+  activeStreak: Int
+  maxStreak: Int
+  workoutGoal: [DayOfWeekGraphQLEnum]
+  encodedImage: String
   giveaways: [Giveaway]
-  reports: [Report]
 }
 
 type Workout {
   id: ID!
   workoutTime: DateTime!
   userId: Int!
+  facilityId: Int!
 }
diff --git a/src/database.py b/src/database.py
index 7b89f42..951ff8c 100644
--- a/src/database.py
+++ b/src/database.py
@@ -3,27 +3,28 @@
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
 
+# import dotenv
+# dotenv.load_dotenv()
 
-db_user = os.environ.get("DB_USERNAME")
-db_password = os.environ.get("DB_PASSWORD")
-db_name = os.environ.get("DB_NAME")
-db_host = os.environ.get("DB_HOST")
-db_port = os.environ.get("DB_PORT")
-
+# Get database credentials from the environment
+db_user = os.getenv("DB_USERNAME")
+db_password = os.getenv("DB_PASSWORD")
+db_name = os.getenv("DB_NAME")
+db_host = os.getenv("DB_HOST")
+db_port = os.getenv("DB_PORT", "5432")  # Add default port
 db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
+
+
 engine = create_engine(db_url)
 db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
 Base = declarative_base()
 Base.query = db_session.query_property()
 
-
 def init_db():
     """
     Initialize database for Uplift.
     """
     logging.info("Initializing database")
-
-    # Load initial data
     Base.metadata.create_all(bind=engine)
     db_session.commit()
diff --git a/src/models/activity.py b/src/models/activity.py
index 27e86fe..41a5c99 100644
--- a/src/models/activity.py
+++ b/src/models/activity.py
@@ -74,7 +74,7 @@ class Price(Base):
 
     id = Column(Integer, primary_key=True)
     activity_id = Column(Integer, ForeignKey("activity.id"), nullable=False)
-    cost = Column(Float, nullable=-False)
+    cost = Column(Float, nullable=False)
     name = Column(String, nullable=False)
     rate = Column(String)
     type = Column(Enum(PriceType), nullable=False)
diff --git a/src/models/enums.py b/src/models/enums.py
new file mode 100644
index 0000000..b88465b
--- /dev/null
+++ b/src/models/enums.py
@@ -0,0 +1,22 @@
+import enum
+from graphene import Enum as GrapheneEnum
+
+
+# SQLAlchemy Enum
+class DayOfWeekEnum(enum.Enum):
+    MONDAY = "MONDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
+    THURSDAY = "THURSDAY"
+    FRIDAY = "FRIDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+
+
+# GraphQL Enum
+class DayOfWeekGraphQLEnum(GrapheneEnum):
+    MONDAY = "MONDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
+    THURSDAY = "THURSDAY"
+    FRIDAY = "FRIDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
\ No newline at end of file
diff --git a/src/models/equipment.py b/src/models/equipment.py
index 2c82b13..dbbefab 100644
--- a/src/models/equipment.py
+++ b/src/models/equipment.py
@@ -1,39 +1,44 @@
 import enum
 
-from sqlalchemy import Column, String, Enum, Integer, ForeignKey
+from sqlalchemy import Column, String, Enum, Integer, ForeignKey, ARRAY
+from sqlalchemy.orm import relationship
 from src.database import Base
 
 
-class EquipmentType(enum.Enum):
-
-    cardio = 0
-    racks_and_benches = 1
-    selectorized = 2
-    multi_cable = 3
-    free_weights = 4
-    miscellaneous = 5
-    plate_loaded = 6
+class MuscleGroup(enum.Enum):
+    ABDOMINALS = 1  # Core/Ab exercises
+    CHEST = 2  # Chest exercises
+    BACK = 3  # Back exercises
+    SHOULDERS = 4  # Shoulder exercises
+    BICEPS = 5  # Bicep exercises
+    TRICEPS = 6  # Tricep exercises
+    HAMSTRINGS = 7  # Hamstring exercises
+    QUADS = 8  # Quad exercises
+    GLUTES = 9  # Glute exercises
+    CALVES = 10  # Calf exercises
+    MISCELLANEOUS = 11  # General equipment, accessories, and multi-purpose items
+    CARDIO = 12  # Cardiovascular equipment
 
 
 class AccessibilityType(enum.Enum):
     wheelchair = 0
 
-
 class Equipment(Base):
     __tablename__ = "equipment"
 
     id = Column(Integer, primary_key=True)
     name = Column(String, nullable=False)
-    equipment_type = Column(Enum(EquipmentType), nullable=False)
+    muscle_groups = Column(ARRAY(Enum(MuscleGroup)), nullable=False)
+    clean_name = Column(String, nullable=False)
     facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False)
     quantity = Column(Integer, nullable=True)
     accessibility = Column(Enum(AccessibilityType), nullable=True)
 
-    def __init__(self, **kwargs):
-        self.id = kwargs.get("id")
-        self.name = kwargs.get("name")
-        self.equipment_type = kwargs.get("equipment_type")
-        self.facility_id = kwargs.get("facility_id")
-        self.quantity = kwargs.get("quantity")
-        self.accessibility = kwargs.get("accessibility")
+    def __init__(self, **kwargs):
+        self.id = kwargs.get("id")
+        self.name = kwargs.get("name")
+        self.muscle_groups = kwargs.get("muscle_groups")
+        self.clean_name = kwargs.get("clean_name")
+        self.facility_id = kwargs.get("facility_id")
+        self.quantity = kwargs.get("quantity")
+        self.accessibility = kwargs.get("accessibility")
diff --git a/src/models/hourly_average_capacity.py b/src/models/hourly_average_capacity.py
new file mode 100644
index 0000000..a4f90c8
--- /dev/null
+++ b/src/models/hourly_average_capacity.py
@@ -0,0 +1,41 @@
+from sqlalchemy import Column, Integer, Float, ForeignKey, ARRAY, Enum
+from src.models.enums import DayOfWeekEnum
+from src.database import Base
+from sqlalchemy.types import Numeric
+from decimal import Decimal
+
+
+class HourlyAverageCapacity(Base):
+    """
+    Stores the average hourly capacity of a facility over the past 30 days.
+
+    Attributes:
+    - `id`  The ID of the hourly capacity record.
+    - `facility_id`  The ID of the facility this capacity record belongs to.
+    - `average_percent`  Average percent capacity of the facility, represented as a float between 0.0 and 1.0.
+    - `hour_of_day`  The hour of the day this average is recorded for, in 24-hour format.
+    - `day_of_week`  The day of the week this average is recorded for.
+    - `history`  Stores previous capacity data for this hour from (up to) the past 30 days.
+    """
+
+    __tablename__ = "hourly_average_capacity"
+
+    id = Column(Integer, primary_key=True)
+    facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False)
+    average_percent = Column(Float, nullable=False)
+    hour_of_day = Column(Integer, nullable=False)
+    day_of_week = Column(Enum(DayOfWeekEnum))
+    history = Column(ARRAY(Numeric), nullable=False, default=[])
+
+    def update_hourly_average(self, current_percent):
+        new_capacity = Decimal(current_percent).quantize(Decimal('0.01'))
+
+        if len(self.history) >= 30:
+            self.history = self.history[-29:]  # Keep the 29 newest records
+
+        # Reassign (rather than mutate) so SQLAlchemy detects the ARRAY change
+        self.history = self.history + [new_capacity] if self.history else [new_capacity]
+
+        total = 0
+        for capacity in self.history:
+            total += capacity
+        self.average_percent = total / len(self.history)
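+
+# Example (sketch): for a record whose history is [Decimal("0.40"), Decimal("0.60")],
+# calling update_hourly_average(0.5) appends Decimal("0.50") and recomputes:
+#   history          -> [0.40, 0.60, 0.50]
+#   average_percent  -> 0.50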
+ """ + + __tablename__ = "token_blacklist" + + id = Column(Integer, primary_key=True) + jti = Column(String(36), index=True, nullable=False) + expires_at = Column(DateTime, nullable=False) diff --git a/src/models/user.py b/src/models/user.py index f0f6bbc..9e608a7 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -1,19 +1,7 @@ -from sqlalchemy import Column, Integer, String, ARRAY -from sqlalchemy import Enum as SQLAEnum +from sqlalchemy import Column, Integer, String, ARRAY, Enum from sqlalchemy.orm import backref, relationship from src.database import Base -from enum import Enum - - -class DayOfWeekEnum(Enum): - MONDAY = "Monday" - TUESDAY = "Tuesday" - WEDNESDAY = "Wednesday" - THURSDAY = "Thursday" - FRIDAY = "Friday" - SATURDAY = "Saturday" - SUNDAY = "Sunday" - +from src.models.enums import DayOfWeekEnum class User(Base): """ @@ -26,14 +14,20 @@ class User(Base): - `net_id` The user's Net ID. - `name` The user's name. - `workout_goal` The days of the week the user has set as their personal goal. + - `active_streak` The number of consecutive weeks the user has met their personal goal. + - `max_streak` The maximum number of consecutive weeks the user has met their personal goal. + - `workout_goal` The max number of weeks the user has met their personal goal. + - `encoded_image` The profile picture URL of the user. """ __tablename__ = "users" id = Column(Integer, primary_key=True) - email = Column(String, nullable=False) + email = Column(String, nullable=True) giveaways = relationship("Giveaway", secondary="giveaway_instance", back_populates="users") - reports = relationship("Report", back_populates="user") net_id = Column(String, nullable=False) name = Column(String, nullable=False) - workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) + active_streak = Column(Integer, nullable=True) + max_streak = Column(Integer, nullable=True) + workout_goal = Column(ARRAY(Enum(DayOfWeekEnum)), nullable=True) + encoded_image = Column(String, nullable=True) \ No newline at end of file diff --git a/src/models/workout.py b/src/models/workout.py index 43f82a9..946b6f8 100644 --- a/src/models/workout.py +++ b/src/models/workout.py @@ -11,6 +11,7 @@ class Workout(Base): - `id` The ID of user. - `workout_time` The date and time of the workout. - `user_id` The ID of the user who completed the workout. + - `facility_id` The ID of the facility visited """ __tablename__ = "workout" @@ -18,3 +19,4 @@ class Workout(Base): id = Column(Integer, primary_key=True) workout_time = Column(DateTime(), nullable=False) # should this be nullable? 
user_id = Column(Integer, ForeignKey("users.id"), nullable=False) + facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False) diff --git a/src/schema.py b/src/schema.py index f68cb1c..e3d27bb 100644 --- a/src/schema.py +++ b/src/schema.py @@ -1,5 +1,8 @@ import graphene -from datetime import datetime, timedelta +import os +from flask_jwt_extended import create_access_token, create_refresh_token, get_jwt_identity, get_jwt, jwt_required +from functools import wraps +from datetime import datetime, timedelta, timezone from graphene_sqlalchemy import SQLAlchemyObjectType from graphql import GraphQLError from src.models.capacity import Capacity as CapacityModel @@ -11,14 +14,18 @@ from src.models.activity import Activity as ActivityModel, Price as PriceModel from src.models.classes import Class as ClassModel from src.models.classes import ClassInstance as ClassInstanceModel +from src.models.token_blacklist import TokenBlocklist from src.models.user import User as UserModel -from src.models.user import DayOfWeekEnum +from src.models.enums import DayOfWeekGraphQLEnum from src.models.giveaway import Giveaway as GiveawayModel from src.models.giveaway import GiveawayInstance as GiveawayInstanceModel from src.models.workout import Workout as WorkoutModel from src.models.report import Report as ReportModel +from src.models.hourly_average_capacity import HourlyAverageCapacity as HourlyAverageCapacityModel from src.database import db_session - +import requests +import json +import os # MARK: - Gym @@ -115,6 +122,14 @@ class Meta: model = CapacityModel +# MARK - Hourly Average Capacity +class HourlyAverageCapacity(SQLAlchemyObjectType): + class Meta: + model = HourlyAverageCapacityModel + + day_of_week = graphene.Field(DayOfWeekGraphQLEnum) + + # MARK: - Price @@ -177,6 +192,8 @@ class User(SQLAlchemyObjectType): class Meta: model = UserModel + workout_goal = graphene.List(DayOfWeekGraphQLEnum) + class UserInput(graphene.InputObjectType): net_id = graphene.String(required=True) @@ -207,17 +224,40 @@ class Meta: model = WorkoutModel +# MARK: - Report + + +class Report(SQLAlchemyObjectType): + class Meta: + model = ReportModel + + gym = graphene.Field(lambda: Gym) + + def resolve_gym(self, info): + query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() + return query + + # MARK: - Query class Query(graphene.ObjectType): get_all_gyms = graphene.List(Gym, description="Get all gyms.") + get_user_by_net_id = graphene.List(User, net_id=graphene.String(), description="Get user by Net ID.") get_users_by_giveaway_id = graphene.List(User, id=graphene.Int(), description="Get all users given a giveaway ID.") get_weekly_workout_days = graphene.List( graphene.String, id=graphene.Int(), description="Get the days a user worked out for the current week." ) get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.") activities = graphene.List(Activity) + get_all_reports = graphene.List(Report, description="Get all reports.") + get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), + description="Get the workout goals of a user by ID.") + get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int( + required=True), description="Get the current and max workout streak of a user.") + get_hourly_average_capacities_by_facility_id = graphene.List( + HourlyAverageCapacity, facility_id=graphene.Int(), description="Get all facility hourly average capacities." 
+ ) def resolve_get_all_gyms(self, info): query = Gym.get_query(info) @@ -227,11 +267,18 @@ def resolve_activities(self, info): query = Activity.get_query(info) return query.all() + def resolve_get_user_by_net_id(self, info, net_id): + user = User.get_query(info).filter(UserModel.net_id == net_id).all() + if not user: + raise GraphQLError("User with the given Net ID does not exist.") + return user + def resolve_get_users_by_giveaway_id(self, info, id): entries = GiveawayInstance.get_query(info).filter(GiveawayInstanceModel.giveaway_id == id).all() users = [User.get_query(info).filter(UserModel.id == entry.user_id).first() for entry in entries] return users + @jwt_required() def resolve_get_workouts_by_id(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -239,6 +286,7 @@ def resolve_get_workouts_by_id(self, info, id): workouts = Workout.get_query(info).filter(WorkoutModel.user_id == user.id).all() return workouts + @jwt_required() def resolve_get_weekly_workout_days(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -261,48 +309,207 @@ def resolve_get_weekly_workout_days(self, info, id): return list(workout_days_set) + def resolve_get_all_reports(self, info): + query = ReportModel.query.all() + return query -# MARK: - Report + @jwt_required() + def resolve_get_workout_goals(self, info, id): + user = User.get_query(info).filter(UserModel.id == id).first() + if not user: + raise GraphQLError("User with the given ID does not exist.") -class Report(SQLAlchemyObjectType): - class Meta: - model = ReportModel + return [day.value for day in user.workout_goal] if user.workout_goal else [] - gym = graphene.Field(lambda: Gym) - user = graphene.Field(lambda: User) + @jwt_required() + def resolve_get_user_streak(self, info, id): + user = User.get_query(info).filter(UserModel.id == id).first() + if not user: + raise GraphQLError("User with the given ID does not exist.") - def resolve_gym(self, info): - query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() - return query + workouts = ( + Workout.get_query(info) + .filter(WorkoutModel.user_id == user.id) + .order_by(WorkoutModel.workout_time.desc()) + .all() + ) + + if not workouts: + return {"active_streak": 0, "max_streak": 0} + + workout_dates = {workout.workout_time.date() for workout in workouts} + sorted_dates = sorted(workout_dates, reverse=True) + + today = datetime.utcnow().date() + active_streak = 0 + max_streak = 0 + streak = 0 + prev_date = None + + for date in sorted_dates: + if prev_date and (prev_date - date).days > 1: + max_streak = max(max_streak, streak) + streak = 0 + + streak += 1 + prev_date = date + + if date == today or (date == today - timedelta(days=1) and active_streak == 0): + active_streak = streak + + max_streak = max(max_streak, streak) + + return {"active_streak": active_streak, "max_streak": max_streak} + + + def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id): + valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681] + if facility_id not in valid_facility_ids: + raise GraphQLError("Invalid facility ID.") + query = HourlyAverageCapacity.get_query(info).filter(HourlyAverageCapacityModel.facility_id == facility_id) + return query.all() - def resolve_user(self, info): - query = User.get_query(info).filter(UserModel.id == self.user_id).first() - return query - # MARK: - Mutation +class LoginUser(graphene.Mutation): + class Arguments: + net_id = 
graphene.String(required=True) + + access_token = graphene.String() + refresh_token = graphene.String() + + def mutate(self, info, net_id): + user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first() + if not user: + return GraphQLError("No user with those credentials. Please create an account and try again.") + + # Generate JWT token + access_token = create_access_token(identity=str(user.id)) + refresh_token = create_refresh_token(identity=str(user.id)) + + db_session.commit() + + return LoginUser(access_token=access_token, refresh_token=refresh_token) + + +class RefreshAccessToken(graphene.Mutation): + new_access_token = graphene.String() + + @jwt_required(refresh=True) + def mutate(self, info): + identity = get_jwt_identity() + + new_access_token = create_access_token(identity=identity) + return RefreshAccessToken(new_access_token=new_access_token) + + +class LogoutUser(graphene.Mutation): + success = graphene.Boolean() + + @jwt_required(verify_type=False) # Allows both access and refresh tokens + def mutate(self, info): + token = get_jwt() + jti = token["jti"] # Unique identifier for the token + + # Get expiration time from JWT itself + expires_at = datetime.fromtimestamp(token["exp"], tz=timezone.utc) + + # Store in blocklist + token = TokenBlocklist(jti=jti, expires_at=expires_at) + db_session.add(token) + db_session.commit() + + return LogoutUser(success=True) + + class CreateUser(graphene.Mutation): class Arguments: name = graphene.String(required=True) net_id = graphene.String(required=True) email = graphene.String(required=True) + encoded_image = graphene.String(required=False) Output = User - def mutate(self, info, name, net_id, email): + def mutate(self, info, name, net_id, email, encoded_image=None): # Check if a user with the given NetID already exists existing_user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first() + final_photo_url = None if existing_user: raise GraphQLError("NetID already exists.") - new_user = UserModel(name=name, net_id=net_id, email=email) + if encoded_image: + upload_url = os.getenv("DIGITAL_OCEAN_URL") + payload = { + "bucket": os.getenv("BUCKET_NAME"), + "image": encoded_image # Base64-encoded image string + } + headers = {"Content-Type": "application/json"} + try: + response = requests.post(upload_url, json=payload, headers=headers) + response.raise_for_status() + json_response = response.json() + final_photo_url = json_response.get("data") + if not final_photo_url: + raise GraphQLError("No URL returned from upload service.") + except requests.exceptions.RequestException as e: + print(f"Request failed: {e}") + raise GraphQLError("Failed to upload photo.") + + new_user = UserModel(name=name, net_id=net_id, email=email, encoded_image=final_photo_url) db_session.add(new_user) db_session.commit() return new_user + +class EditUser(graphene.Mutation): + class Arguments: + name = graphene.String(required=False) + net_id = graphene.String(required=True) + email = graphene.String(required=False) + encoded_image = graphene.String(required=False) + + Output = User + def mutate(self, info, net_id, name=None, email=None, encoded_image=None): + existing_user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first() + if not existing_user: + raise GraphQLError("User with given net id does not exist.") + + if name is not None: + existing_user.name = name + if email is not None: + existing_user.email = email + if encoded_image is not None: + upload_url = os.getenv("DIGITAL_OCEAN_URL") # Base URL for upload 
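+
+# Example (sketch): creating a user over GraphQL; encodedImage is optional and,
+# when provided, is first pushed to the external upload service above:
+#   mutation { createUser(name: "Jane Doe", netId: "jd123", email: "jd123@example.edu") { id netId } }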
+
+
+class EditUser(graphene.Mutation):
+    class Arguments:
+        name = graphene.String(required=False)
+        net_id = graphene.String(required=True)
+        email = graphene.String(required=False)
+        encoded_image = graphene.String(required=False)
+
+    Output = User
+
+    def mutate(self, info, net_id, name=None, email=None, encoded_image=None):
+        existing_user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first()
+        if not existing_user:
+            raise GraphQLError("User with given net id does not exist.")
+
+        if name is not None:
+            existing_user.name = name
+        if email is not None:
+            existing_user.email = email
+        if encoded_image is not None:
+            upload_url = os.getenv("DIGITAL_OCEAN_URL")  # Base URL for the upload endpoint
+            if not upload_url:
+                raise GraphQLError("Upload URL not configured.")
+
+            payload = {
+                "bucket": os.getenv("BUCKET_NAME", "DEV_BUCKET"),
+                "image": encoded_image  # Base64-encoded image string
+            }
+            headers = {"Content-Type": "application/json"}
+
+            print(f"Uploading image with payload: {payload}")
+
+            try:
+                response = requests.post(upload_url, json=payload, headers=headers)
+                response.raise_for_status()
+                json_response = response.json()
+                print(f"Upload API response: {json_response}")
+                final_photo_url = json_response.get("data")
+                if not final_photo_url:
+                    raise GraphQLError("No URL returned from upload service.")
+                existing_user.encoded_image = final_photo_url
+            except requests.exceptions.RequestException as e:
+                print(f"Request failed: {e}")
+                raise GraphQLError("Failed to upload photo.")
+
+        db_session.commit()
+        return existing_user
 
 class EnterGiveaway(graphene.Mutation):
     class Arguments:
@@ -311,6 +518,7 @@
 
     Output = GiveawayInstance
 
+    @jwt_required()
     def mutate(self, info, user_net_id, giveaway_id):
         # Check if NetID and Giveaway ID exists
         user = User.get_query(info).filter(UserModel.net_id == user_net_id).first()
@@ -364,6 +572,7 @@
 
     Output = User
 
+    @jwt_required()
     def mutate(self, info, user_id, workout_goal):
         user = User.get_query(info).filter(UserModel.id == user_id).first()
         if not user:
@@ -374,7 +583,7 @@
         for day in workout_goal:
             try:
                 # Convert string to enum
-                validated_workout_goal.append(DayOfWeekEnum[day.upper()])
+                validated_workout_goal.append(DayOfWeekGraphQLEnum[day.upper()].value)
             except KeyError:
                 raise GraphQLError(f"Invalid day of the week: {day}")
 
@@ -389,23 +598,28 @@
 class logWorkout(graphene.Mutation):
     class Arguments:
         workout_time = graphene.DateTime(required=True)
         user_id = graphene.Int(required=True)
+        facility_id = graphene.Int(required=True)
 
     Output = Workout
 
-    def mutate(self, info, workout_time, user_id):
+    @jwt_required()
+    def mutate(self, info, workout_time, user_id, facility_id):
         user = User.get_query(info).filter(UserModel.id == user_id).first()
         if not user:
             raise GraphQLError("User with given ID does not exist.")
+        facility = Facility.get_query(info).filter(FacilityModel.id == facility_id).first()
+        if not facility:
+            raise GraphQLError("Facility with given ID does not exist.")
 
-        workout = WorkoutModel(workout_time=workout_time, user_id=user.id)
+        workout = WorkoutModel(workout_time=workout_time, user_id=user.id, facility_id=facility.id)
         db_session.add(workout)
         db_session.commit()
 
         return workout
 
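+
+# Example (sketch): logging a workout over GraphQL (requires a valid access
+# token, since the resolver is @jwt_required):
+#   mutation { logWorkout(userId: 1, facilityId: 1, workoutTime: "2025-03-12T10:00:00") { id facilityId } }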
"WAIT_TIMES_NOT_UPDATED", + "OTHER", + ]: raise GraphQLError("Issue is not a valid enumeration.") - report = ReportModel(description=description, user_id=user_id, issue=issue, - created_at=created_at, gym_id=gym_id) + report = ReportModel(description=description, issue=issue, created_at=created_at, gym_id=gym_id) db_session.add(report) db_session.commit() return CreateReport(report=report) +class DeleteUserById(graphene.Mutation): + class Arguments: + user_id = graphene.Int(required=True) + + Output = User + + def mutate(self, info, user_id): + # Check if user exists + user = User.get_query(info).filter(UserModel.id == user_id).first() + if not user: + raise GraphQLError("User with given ID does not exist.") + db_session.delete(user) + db_session.commit() + return user + + class Mutation(graphene.ObjectType): create_giveaway = CreateGiveaway.Field(description="Creates a new giveaway.") create_user = CreateUser.Field(description="Creates a new user.") + edit_user = EditUser.Field(description="Edit a new user.") enter_giveaway = EnterGiveaway.Field(description="Enters a user into a giveaway.") set_workout_goals = SetWorkoutGoals.Field(description="Set a user's workout goals.") log_workout = logWorkout.Field(description="Log a user's workout.") + login_user = LoginUser.Field(description="Login a user.") + logout_user = LogoutUser.Field(description="Logs out a user.") + refresh_access_token = RefreshAccessToken.Field(description="Refreshes the access token.") create_report = CreateReport.Field(description="Creates a new report.") + delete_user = DeleteUserById.Field(description="Deletes a user by ID.") schema = graphene.Schema(query=Query, mutation=Mutation) diff --git a/src/scrapers/capacities_scraper.py b/src/scrapers/capacities_scraper.py index bc269ff..8e58857 100644 --- a/src/scrapers/capacities_scraper.py +++ b/src/scrapers/capacities_scraper.py @@ -4,8 +4,11 @@ from datetime import datetime from src.database import db_session from src.models.capacity import Capacity +from src.models.hourly_average_capacity import HourlyAverageCapacity +from src.models.enums import DayOfWeekEnum from src.utils.constants import ( C2C_URL, + CRC_URL_NEW, CAPACITY_MARKER_COUNTS, CAPACITY_MARKER_NAMES, CAPACITY_MARKER_UPDATED, @@ -14,8 +17,8 @@ ) from src.utils.utils import get_facility_id, unix_time - -def fetch_capacities(): +# Legacy scraper from old webpage using CRC_URL +def fetch_capacities_old(): """ Fetch capacities for all facilities from Connect2Concepts. 
""" @@ -49,6 +52,44 @@ def fetch_capacities(): # Add to sheets add_single_capacity(count, facility_id, percent, updated) +# New scraper from new API using CRC_URL_NEW +def fetch_capacities(): + """Fetch capacities from the new JSON API endpoint.""" + try: + headers = { + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0" + } + + response = requests.get(CRC_URL_NEW, headers=headers) + facilities = response.json() + + for facility in facilities: + try: + facility_name = facility["LocationName"] + + # Map API name to database name + if facility_name not in CAPACITY_MARKER_NAMES: + print(f"Warning: No name mapping for facility: {facility_name}") + continue + + db_name = CAPACITY_MARKER_NAMES[facility_name] + facility_id = get_facility_id(db_name) + + count = int(facility["LastCount"]) + updated_str = facility["LastUpdatedDateAndTime"] + total_capacity = int(facility["TotalCapacity"]) + + percent = count / total_capacity if total_capacity > 0 else 0.0 + updated = datetime.strptime(updated_str.split(".")[0], "%Y-%m-%dT%H:%M:%S") + + add_single_capacity(count, facility_id, percent, updated) + + except Exception as e: + print(f"Error processing facility {facility.get('LocationName', 'unknown')}: {str(e)}") + + except Exception as e: + print(f"Error fetching capacities: {str(e)}") + raise def add_single_capacity(count, facility_id, percent, updated): """ @@ -85,3 +126,34 @@ def get_capacity_datetime(time_str): format = "%m/%d/%Y %I:%M %p" time_obj = datetime.strptime(time_str, format) return time_obj + + +def update_hourly_capacity(curDay, curHour): + """ + Update hourly average capacity every hour based on collected data. + """ + currentCapacities = db_session.query(Capacity).all() + + for capacity in currentCapacities: + try: + hourly_average_capacity = db_session.query(HourlyAverageCapacity).filter(HourlyAverageCapacity.facility_id == capacity.facility_id, HourlyAverageCapacity.day_of_week == DayOfWeekEnum[curDay].value, HourlyAverageCapacity.hour_of_day == curHour).first() + + if hourly_average_capacity is not None: + print("updating average") + hourly_average_capacity.update_hourly_average(capacity.percent) + else: + print("No hourly capacity, creating new entry") + hourly_average_capacity = HourlyAverageCapacity( + facility_id=capacity.facility_id, + average_percent=capacity.percent, + hour_of_day=curHour, + day_of_week=DayOfWeekEnum[curDay].value, + history=[capacity.percent] + ) + + db_session.merge(hourly_average_capacity) + db_session.commit() + + except Exception as e: + print(f"Error updating hourly average: {e}") + diff --git a/src/scrapers/class_scraper.py b/src/scrapers/class_scraper.py index 18ccdd7..a55a27a 100644 --- a/src/scrapers/class_scraper.py +++ b/src/scrapers/class_scraper.py @@ -71,8 +71,8 @@ def fetch_classes(num_pages): gym_class = create_group_class(class_href) if gym_class is None or not gym_class.id: - raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}") - + raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}") + class_instance.class_id = gym_class.id date_string = row_elems[1].text.strip() if "Today" in date_string: diff --git a/src/scrapers/equipment_scraper.py b/src/scrapers/equipment_scraper.py index f55942b..c7e2b28 100644 --- a/src/scrapers/equipment_scraper.py +++ b/src/scrapers/equipment_scraper.py @@ -1,32 +1,35 @@ from bs4 import BeautifulSoup import requests +import json from src.database import db_session -from src.models.equipment import 
diff --git a/src/scrapers/class_scraper.py b/src/scrapers/class_scraper.py
index 18ccdd7..a55a27a 100644
--- a/src/scrapers/class_scraper.py
+++ b/src/scrapers/class_scraper.py
@@ -71,8 +71,8 @@ def fetch_classes(num_pages):
             gym_class = create_group_class(class_href)

             if gym_class is None or not gym_class.id:
-                raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}")
-
+                raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}")
+
             class_instance.class_id = gym_class.id
             date_string = row_elems[1].text.strip()
             if "Today" in date_string:
diff --git a/src/scrapers/equipment_scraper.py b/src/scrapers/equipment_scraper.py
index f55942b..c7e2b28 100644
--- a/src/scrapers/equipment_scraper.py
+++ b/src/scrapers/equipment_scraper.py
@@ -1,32 +1,35 @@
 from bs4 import BeautifulSoup
 import requests
+import json

 from src.database import db_session
-from src.models.equipment import Equipment, EquipmentType, AccessibilityType
+from src.models.equipment import Equipment, MuscleGroup, AccessibilityType
 from src.utils.utils import get_facility_id
 from src.utils.constants import HNH_DETAILS, NOYES_DETAILS, TEAGLE_DOWN_DETAILS, TEAGLE_UP_DETAILS, MORRISON_DETAILS

 equip_pages = [HNH_DETAILS, NOYES_DETAILS, TEAGLE_DOWN_DETAILS, TEAGLE_UP_DETAILS, MORRISON_DETAILS]

+try:
+    # Load equipment labels from JSON file
+    with open("src/utils/equipment_labels.json") as file:
+        data = json.load(file)
+except Exception as e:
+    raise RuntimeError(f"Failed to load equipment labels: {str(e)}")

-def categorize_equip(category):
-    if "cardio" in category.lower():
-        return EquipmentType.cardio
-    if "racks" in category.lower() or "benches" in category.lower():
-        return EquipmentType.racks_and_benches
-    if "selectorized" in category.lower():
-        return EquipmentType.selectorized
-    if "multi-cable" in category.lower():
-        return EquipmentType.multi_cable
-    if "free weights" in category.lower():
-        return EquipmentType.free_weights
-    if "miscellaneous" in category.lower():
-        return EquipmentType.miscellaneous
-    if "plate" in category.lower():
-        return EquipmentType.plate_loaded
-    return -1
-
-
-def create_equip(category, equip, fit_center_id, fit_center):
+
+def categorize_equip(name):
+    try:
+        cats = data[name]["label"]
+        return [MuscleGroup[cat.replace(" ", "_")] for cat in cats]
+    except KeyError:
+        return []  # Return empty list if no muscle groups found
+
+
+def get_clean_name(name):
+    try:
+        return data[name]["clean_name"]
+    except KeyError:
+        return name
+
+
+def create_equip(equip, fit_center_id, fit_center):
     """
     Create equipment from a list of equipment.
     """
@@ -42,34 +45,43 @@ def create_equip(category, equip, fit_center_id, fit_center):
         if equip_obj[0].isnumeric():
             num_objs = int(equip_obj[0])
             equip_obj = equip_obj[1:]
-        equip_obj = " ".join(equip_obj)
-
+        # Strip leading/trailing spaces and replace non-breaking spaces after joining
+        equip_obj = (" ".join(equip_obj)).strip().replace(chr(160), chr(32))
+        clean_name = get_clean_name(equip_obj)
         num_objs = None if num_objs == 0 else num_objs
         accessibility_option = None if "wheelchair" not in equip_obj else 1
-        equip_type = categorize_equip(category)
+        muscle_groups = categorize_equip(equip_obj)

         try:
             existing_equip = (
                 db_session.query(Equipment)
                 .filter(
                     Equipment.name == equip_obj,
-                    Equipment.equipment_type == equip_type,
                     Equipment.facility_id == fit_center_id,
                 )
                 .first()
             )
-            assert existing_equip is not None
-        except:
+            if existing_equip is not None:
+                continue
+
             equip_db_obj = Equipment(
-                name=equip_obj,
-                equipment_type=equip_type,
+                name=equip_obj.strip(),
                 facility_id=fit_center_id,
+                clean_name=clean_name,
                 quantity=num_objs,
                 accessibility=AccessibilityType.wheelchair if accessibility_option else None,
+                muscle_groups=muscle_groups,
             )
+
             equip_db_objs.append(equip_db_obj)
-    db_session.add_all(equip_db_objs)
-    db_session.commit()
+
+        except Exception as e:
+            print(f"Error creating equipment {equip_obj}: {str(e)}")
+            continue
+
+    if equip_db_objs:
+        db_session.add_all(equip_db_objs)
+        db_session.commit()


 def process_equip_page(page, fit_center):
@@ -84,21 +96,21 @@ def process_equip_page(page, fit_center):
         head = table.find("thead").find_all("tr")
         body = table.find("tbody").find_all("tr")
         for row in range(len(head)):
-            categories = head[row].find_all("th")
+            muscle_groups = head[row].find_all("th")
             equip = body[row].find_all("td")
-            if categories[0].text:
-                create_equip(categories[0].text, equip[0], fit_center_id, fit_center)
-            if categories[1].text:
-                create_equip(categories[1].text, equip[1], fit_center_id, fit_center)
+            if muscle_groups[0].text:
+                create_equip(equip[0], fit_center_id, fit_center)
+            if muscle_groups[1].text:
+                create_equip(equip[1], fit_center_id, fit_center)
         else:
             body = table.find_all("tr")
             for even_row in range(0, len(body), 2):
-                categories = body[even_row].find_all("th")
+                muscle_groups = body[even_row].find_all("th")
                 equip = body[even_row + 1].find_all("td")
-                if categories[0].text:
-                    create_equip(categories[0].text, equip[0], fit_center_id, fit_center)
-                if categories[1].text:
-                    create_equip(categories[1].text, equip[1], fit_center_id, fit_center)
+                if muscle_groups[0].text:
+                    create_equip(equip[0], fit_center_id, fit_center)
+                if muscle_groups[1].text:
+                    create_equip(equip[1], fit_center_id, fit_center)


 def scrape_equipment():
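[Review note] The scraped names are used verbatim as keys into equipment_labels.json,
which is why create_equip normalizes non-breaking spaces (chr(160)) before the lookup.
A quick usage sketch of the two helpers, using an entry that exists in the JSON below
(enum reprs elided since MuscleGroup's values are not shown in this diff):

    >>> get_clean_name("Precor Lat Pulldown")
    'Lat Pulldown'
    >>> categorize_equip("Precor Lat Pulldown")
    [<MuscleGroup.BACK: ...>, <MuscleGroup.BICEPS: ...>]
    >>> categorize_equip("Some Unlisted Machine")  # unmapped names degrade gracefully
    []

Two things to double-check: the module-level open("src/utils/equipment_labels.json")
resolves relative to the process working directory, so importing the scraper from any
directory other than the repo root will raise the RuntimeError (building the path from
__file__ would avoid that); and constants.py below now reads os.environ["JWT_SECRET_KEY"]
at import time, which assumes `import os` already appears at the top of that module and
that the variable is set in every environment, including CI.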
diff --git a/src/scrapers/sp_hours_scraper.py b/src/scrapers/sp_hours_scraper.py
index 2ed147f..01e2af0 100644
--- a/src/scrapers/sp_hours_scraper.py
+++ b/src/scrapers/sp_hours_scraper.py
@@ -49,6 +49,8 @@ def fetch_sp_facility():
             clean_hours(date, get_facility_id(name))
             if hours != MARKER_CLOSED:
                 parse_special_hours(hours, type, date, get_facility_id(name))
+            else:
+                add_special_facility_hours(date, date, get_facility_id(name))

 # MARK: Helpers

@@ -100,6 +102,8 @@ def add_special_facility_hours(start_time, end_time, facility_id, court_type=Non
     # Convert datetime objects to Unix
     start_unix = unix_time(start_time)
     end_unix = unix_time(end_time)
+
+    print(f"Adding special hours: start_unix={start_unix}, end_unix={end_unix}, facility_id={facility_id}, is_special=True")

     # Create hours
     hrs = OpenHours(
@@ -115,3 +119,4 @@ def add_special_facility_hours(start_time, end_time, facility_id, court_type=Non
     # Add to database
     db_session.merge(hrs)
     db_session.commit()
+    print(f"Committed special hours for facility_id={facility_id}")
diff --git a/src/utils/constants.py b/src/utils/constants.py
index 58dc13d..3d0ed63 100644
--- a/src/utils/constants.py
+++ b/src/utils/constants.py
@@ -6,8 +6,10 @@
 # Base URL for Cornell Recreation Website
 BASE_URL = "https://scl.cornell.edu/recreation/"

-# The path for capacities
+# The old path for capacities
 C2C_URL = "https://connect2concepts.com/connect2/?type=bar&key=355de24d-d0e4-4262-ae97-bc0c78b92839&loc_status=false"
+# The new path for capacities
+CRC_URL_NEW = "https://goboardapi.azurewebsites.net/api/FacilityCount/GetCountsByAccount?AccountAPIKey=355de24d-d0e4-4262-ae97-bc0c78b92839"

 # The marker for counts in the HTML
 CAPACITY_MARKER_COUNTS = "Last Count: "
@@ -51,6 +53,9 @@
 # The path for Helen Newman Fitness Center details
 HNH_DETAILS = "https://scl.cornell.edu/recreation/facility/helen-newman-fitness-center"

+# JWT secret key
+JWT_SECRET_KEY = os.environ["JWT_SECRET_KEY"]
+
 # Marker in sheets for alternating between badminton and volleyball (HNH Fridays)
 MARKER_ALT = "(ALT)"

diff --git a/src/utils/equipment_labels.json b/src/utils/equipment_labels.json
new file mode 100644
index 0000000..4943a3a
--- /dev/null
+++ b/src/utils/equipment_labels.json
@@ -0,0 +1,326 @@
+{
+    "GRIPR Sandbag 2kg-10kg": {
+        "clean_name": "GRIPR Sandbag 2kg-10kg",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Power Lift Multi-Racks": {
+        "clean_name": "Power Lift Multi-Racks",
+        "label": ["HAMSTRINGS", "GLUTES", "CALVES"]
+    },
+    "Stability Disks": {
+        "clean_name": "Stability Disks",
+        "label": ["ABDOMINALS"]
+    },
+    "Expresso Upright Bike": {
+        "clean_name": "Expresso Upright Bike",
+        "label": ["CARDIO"]
+    },
+    "Precor Treadmills": {
"Treadmills", + "label": ["CARDIO"] + }, + "Power Lift Half Racks": { + "clean_name": "Power Lift Half Racks", + "label": ["HAMSTRINGS", "GLUTES", "CALVES"] + }, + "Power Lift Adjustable Benches": { + "clean_name": "Power Lift Adjustable Benches", + "label": ["MISCELLANEOUS"] + }, + "Precor Glute Extension": { + "clean_name": "Glute Extension", + "label": ["GLUTES"] + }, + "Plyometric Boxes": { + "clean_name": "Plyometric Boxes", + "label": ["QUADS", "GLUTES"] + }, + "Matrix Recumbent Bike": { + "clean_name": "Matrix Recumbent Bike", + "label": ["CARDIO"] + }, + "Medicine Balls 1kg-5kg": { + "clean_name": "Medicine Balls 1kg-5kg", + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] + }, + "Power Lift Multi-Rack Benches": { + "clean_name": "Power Lift Multi-Rack Benches", + "label": ["MISCELLANEOUS"] + }, + "Power Lift Leg Press": { + "clean_name": "Power Lift Leg Press", + "label": ["QUADS", "GLUTES", "CALVES"] + }, + "Dumbbells 3lbs-70lbs": { + "clean_name": "Dumbbells 3lbs-70lbs", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + }, + "Medicine Balls 4kg-7kg": { + "clean_name": "Medicine Balls 4kg-7kg", + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] + }, + "Precor Pulldown": { + "clean_name": "Pulldown", + "label": ["BACK", "BICEPS"] + }, + "Bulgarian Bags 5kg-17kg": { + "clean_name": "Bulgarian Bags 5kg-17kg", + "label": ["MISCELLANEOUS"] + }, + "Dumbbells 3lbs-100lbs": { + "clean_name": "Dumbbells 3lbs-100lbs", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + }, + "Precor Recumbent Bike": { + "clean_name": "Recumbent Bike", + "label": ["CARDIO"] + }, + "Precor Seated Low Row": { + "clean_name": "Seated Low Row", + "label": ["BACK", "BICEPS"] + }, + "Precor Seated Leg Curl": { + "clean_name": "Seated Leg Curl", + "label": ["HAMSTRINGS"] + }, + "Bar Pads": { + "clean_name": "Bar Pads", + "label": ["MISCELLANEOUS"] + }, + "GRIPR Sandbag 2kg-16kg": { + "clean_name": "GRIPR Sandbag 2kg-16kg", + "label": ["MISCELLANEOUS"] + }, + "Precor Shoulder Press": { + "clean_name": "Shoulder Press", + "label": ["SHOULDERS", "TRICEPS"] + }, + "Barbells 35lbs": { + "clean_name": "Barbells 35lbs", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + }, + "AB Wheels": { + "clean_name": "AB Wheels", + "label": ["ABDOMINALS"] + }, + "Precor Tri Pushdown": { + "clean_name": "Tri Pushdown", + "label": ["TRICEPS"] + }, + "Matrix Upright Spin Bike": { + "clean_name": "Matrix Upright Spin Bike", + "label": ["CARDIO"] + }, + "Hex Trap Bar": { + "clean_name": "Hex Trap Bar", + "label": ["MISCELLANEOUS"] + }, + "Precor Leg Extension": { + "clean_name": "Leg Extension", + "label": ["QUADS"] + }, + "Precor Rear Delt/Pec Fly": { + "clean_name": "Rear Delt/Pec Fly", + "label": ["CHEST", "SHOULDERS", "BACK"] + }, + "Precor Treadmill": { + "clean_name": "Treadmill", + "label": ["CARDIO"] + }, + "Slam Balls 5kg-15kg": { + "clean_name": "Slam Balls 5kg-15kg", + "label": ["ABDOMINALS"] + }, + "Precor Hi/Lo Pulleys": { + "clean_name": "Hi/Lo Pulleys", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST"] + }, + "Matrix Climb Mill": { + "clean_name": "Matrix Climb Mill", + "label": ["CARDIO"] + }, + "Plyo Boxes": { + "clean_name": "Plyo Boxes", + "label": ["QUADS", "GLUTES"] + }, + "Matrix Treadmill": { + "clean_name": "Matrix Treadmill", + "label": ["CARDIO"] + }, + "Core Bags 5kg-25kg": { + "clean_name": "Core Bags 5kg-25kg", + "label": ["ABDOMINALS"] + }, + "Matrix Power Racks": { + "clean_name": "Matrix Power Racks", + "label": ["MISCELLANEOUS"] + }, + "Matrix 
+    "Matrix Rowing Ergometer": {
+        "clean_name": "Matrix Rowing Ergometer",
+        "label": ["CARDIO", "BACK"]
+    },
+    "360 Multi-Trainer": {
+        "clean_name": "360 Multi-Trainer",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Precor Seated Row": {
+        "clean_name": "Seated Row",
+        "label": ["BACK", "BICEPS"]
+    },
+    "EZ Curl Bar": {
+        "clean_name": "EZ Curl Bar",
+        "label": ["BICEPS", "TRICEPS"]
+    },
+    "Precor Incline Lever Row": {
+        "clean_name": "Incline Lever Row",
+        "label": ["BACK", "BICEPS"]
+    },
+    "Stretch Bands": {
+        "clean_name": "Stretch Bands",
+        "label": ["MISCELLANEOUS"]
+    },
+    "BOSU Ball": {
+        "clean_name": "BOSU Ball",
+        "label": ["ABDOMINALS"]
+    },
+    "Precor Elliptical": {
+        "clean_name": "Elliptical",
+        "label": ["CARDIO"]
+    },
+    "C2 Rowing Ergometer": {
+        "clean_name": "C2 Rowing Ergometer",
+        "label": ["CARDIO", "BACK"]
+    },
+    "GRIPR Sandbag 2kg-12kg": {
+        "clean_name": "GRIPR Sandbag 2kg-12kg",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Precor Inner/Outer Thigh": {
+        "clean_name": "Inner/Outer Thigh",
+        "label": ["GLUTES"]
+    },
+    "Stability Balls": {
+        "clean_name": "Stability Balls",
+        "label": ["ABDOMINALS"]
+    },
+    "Precor Preacher Curl": {
+        "clean_name": "Preacher Curl",
+        "label": ["BICEPS"]
+    },
+    "SciFit Total Body Bike (wheelchair accessible)": {
+        "clean_name": "SciFit Total Body Bike (wheelchair accessible)",
+        "label": ["CARDIO"]
+    },
+    "Foam Rollers": {
+        "clean_name": "Foam Rollers",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Precor BACK Extension": {
+        "clean_name": "BACK Extension",
+        "label": ["BACK", "ABDOMINALS"]
+    },
+    "Precor AMT": {
+        "clean_name": "AMT",
+        "label": ["CARDIO"]
+    },
+    "Precor Upright Bike": {
+        "clean_name": "Upright Bike",
+        "label": ["CARDIO"]
+    },
+    "Precor Rotary Torso": {
+        "clean_name": "Rotary Torso",
+        "label": ["ABDOMINALS"]
+    },
+    "Precor Tricep Pushdown": {
+        "clean_name": "Tricep Pushdown",
+        "label": ["TRICEPS"]
+    },
+    "Power Lift Bench Press": {
+        "clean_name": "Power Lift Bench Press",
+        "label": ["CHEST", "TRICEPS", "SHOULDERS"]
+    },
+    "Power Lift Glute Ham Raise": {
+        "clean_name": "Power Lift Glute Ham Raise",
+        "label": ["HAMSTRINGS", "GLUTES"]
+    },
+    "Precor AMTs": {
+        "clean_name": "AMTs",
+        "label": ["CARDIO"]
+    },
+    "Precor Ellipticals": {
+        "clean_name": "Ellipticals",
+        "label": ["CARDIO"]
+    },
+    "Precor Chin/Dip Assist": {
+        "clean_name": "Chin/Dip Assist",
+        "label": ["BACK", "CHEST", "TRICEPS"]
+    },
+    "Power Lift Bench Presses": {
+        "clean_name": "Power Lift Bench Presses",
+        "label": ["CHEST", "TRICEPS", "SHOULDERS"]
+    },
+    "Power Lift Prone Leg Curl": {
+        "clean_name": "Power Lift Prone Leg Curl",
+        "label": ["HAMSTRINGS"]
+    },
+    "Precor CHEST Press": {
+        "clean_name": "CHEST Press",
+        "label": ["CHEST", "TRICEPS", "SHOULDERS"]
+    },
+    "Soft TIYR (tire) 60kg": {
+        "clean_name": "Soft TIYR (tire) 60kg",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Precor Calf Press": {
+        "clean_name": "Calf Press",
+        "label": ["CALVES"]
+    },
+    "Power Lift Leg Extension": {
+        "clean_name": "Power Lift Leg Extension",
+        "label": ["QUADS"]
+    },
+    "Power Lift Half Racks & Platforms": {
+        "clean_name": "Power Lift Half Racks & Platforms",
+        "label": ["MISCELLANEOUS"]
+    },
+    "Slam Balls": {
+        "clean_name": "Slam Balls",
+        "label": ["ABDOMINALS"]
+    },
+    "Precor Hi/Lo Pulley": {
+        "clean_name": "Hi/Lo Pulley",
+        "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST"]
+    },
+    "Precor Leg Press": {
+        "clean_name": "Leg Press",
+        "label": ["QUADS", "GLUTES", "CALVES"]
+    },
+    "Kettlebells 8kg-32kg": {
+        "clean_name": "Kettlebells 8kg-32kg",
+        "label": ["ABDOMINALS", "GLUTES", "BICEPS", "BACK"]
+    },
"BACK"] + }, + "Medicine Balls 1kg-10kg": { + "clean_name": "Medicine Balls 1kg-10kg", + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] + }, + "Precor Standing Leg Curl": { + "clean_name": "Standing Leg Curl", + "label": ["HAMSTRINGS"] + }, + "Dumbbells 3lbs-125lbs": { + "clean_name": "Dumbbells 3lbs-125lbs", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + }, + "Precor Lat Pulldown": { + "clean_name": "Lat Pulldown", + "label": ["BACK", "BICEPS"] + }, + "Barbells 45lbs": { + "clean_name": "Barbells 45lbs", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + }, + "Marpo Rope Trainer": { + "clean_name": "Marpo Rope Trainer", + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] + } +} \ No newline at end of file diff --git a/src/utils/utils.py b/src/utils/utils.py index 5acb36c..2233eab 100644 --- a/src/utils/utils.py +++ b/src/utils/utils.py @@ -7,6 +7,7 @@ from src.models.gym import Gym from src.models.facility import Facility, FacilityType from src.models.amenity import Amenity, AmenityType +from src.models.workout import Workout from src.utils.constants import ASSET_BASE_URL, EASTERN_TIMEZONE @@ -140,3 +141,54 @@ def get_facility_id(name): """ facility = Facility.query.filter_by(name=name).first() return facility.id + +def calculate_streaks(user, workouts, workout_goal): + """ + Calculate the current and maximum workout streaks for a user. + + Parameters: + - `user` The user object. + - `workouts` The user's list of completed workouts. + - `workout_goal` A list of goal days (e.g., ['Monday', 'Wednesday']). + + Returns: + - Updates `user.active_streak` and `user.max_streak`. + """ + if not workouts: + user.active_streak = 0 + user.max_streak = user.max_streak or 0 + return + + # Convert goal days to set of weekday numbers (Monday=0, Sunday=6) + goal_days = {time.strptime(day, "%A").tm_wday for day in workout_goal} + + # Filter workouts to only include those on goal days + valid_workouts = [w for w in workouts if w.workout_time.weekday() in goal_days] + + # Sort by workout date + valid_workouts.sort(key=lambda x: x.workout_time) + + active_streak = 1 + max_streak = user.max_streak or 0 + + for i in range(1, len(valid_workouts)): + prev_day = valid_workouts[i - 1].workout_time + curr_day = valid_workouts[i].workout_time + + # Find the next expected goal day + expected_next_day = prev_day + timedelta(days=1) + while expected_next_day.weekday() not in goal_days: + expected_next_day += timedelta(days=1) + + # Check if current workout is on the expected next goal day + if curr_day.date() == expected_next_day.date(): + active_streak += 1 + else: + max_streak = max(max_streak, active_streak) + active_streak = 1 + + # Final update + max_streak = max(max_streak, active_streak) + user.active_streak = active_streak + user.max_streak = max_streak +