From 20df4f1d6527241909776a9268e67ff8df132741 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 30 Oct 2024 18:31:26 -0400 Subject: [PATCH 01/53] Implemented Gym Equipment Categories - Created a json file containing the equipment labels and their clean name - Fixed bug in equipment scraper to replace different ASCII character's representation of a space character - Changed equipment type enumerations in equipment model - Added categories column and clean_name column in equipment model --- schema.graphql | 29 ++- src/models/equipment.py | 41 ++-- src/scrapers/class_scraper.py | 4 +- src/scrapers/equipment_scraper.py | 75 ++++--- src/utils/equipment_labels.json | 326 ++++++++++++++++++++++++++++++ 5 files changed, 407 insertions(+), 68 deletions(-) create mode 100644 src/utils/equipment_labels.json diff --git a/schema.graphql b/schema.graphql index e44663b..06c080f 100644 --- a/schema.graphql +++ b/schema.graphql @@ -65,18 +65,10 @@ enum CourtType { BADMINTON } -type CreateGiveaway { - giveaway: Giveaway -} - type CreateReport { report: Report } -type CreateUser { - user: User -} - scalar DateTime enum DayOfWeekEnum { @@ -92,20 +84,26 @@ enum DayOfWeekEnum { type Equipment { id: ID! name: String! - equipmentType: EquipmentType! + categories: [EquipmentType]! + cleanName: String! facilityId: Int! quantity: Int accessibility: AccessibilityType } enum EquipmentType { - CARDIO - RACKS_AND_BENCHES - SELECTORIZED - MULTI_CABLE - FREE_WEIGHTS + ABDOMINALS + CHEST + BACK + SHOULDERS + BICEPS + TRICEPS + HAMSTRINGS + QUADS + GLUTES + CALVES MISCELLANEOUS - PLATE_LOADED + CARDIO } type Facility { @@ -160,6 +158,7 @@ type Mutation { enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(userId: Int!, workoutTime: DateTime!): Workout + createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!, userId: Int!): CreateReport } type OpenHours { diff --git a/src/models/equipment.py b/src/models/equipment.py index 2c82b13..d4d0a99 100644 --- a/src/models/equipment.py +++ b/src/models/equipment.py @@ -1,39 +1,44 @@ import enum -from sqlalchemy import Column, String, Enum, Integer, ForeignKey +from sqlalchemy import Column, String, Enum, Integer, ForeignKey, ARRAY +from sqlalchemy.orm import relationship from src.database import Base class EquipmentType(enum.Enum): - - cardio = 0 - racks_and_benches = 1 - selectorized = 2 - multi_cable = 3 - free_weights = 4 - miscellaneous = 5 - plate_loaded = 6 + ABDOMINALS = 1 # Core/Ab exercises + CHEST = 2 # Chest exercises + BACK = 3 # Back exercises + SHOULDERS = 4 # Shoulder exercises + BICEPS = 5 # Bicep exercises + TRICEPS = 6 # Tricep exercises + HAMSTRINGS = 7 # Hamstring exercises + QUADS = 8 # Quad exercises + GLUTES = 9 # Glute exercises + CALVES = 10 # Calf exercises + MISCELLANEOUS = 11 # General equipment, accessories, and multi-purpose items + CARDIO = 12 # Cardiovascular equipment class AccessibilityType(enum.Enum): wheelchair = 0 - class Equipment(Base): __tablename__ = "equipment" id = Column(Integer, primary_key=True) name = Column(String, nullable=False) - equipment_type = Column(Enum(EquipmentType), nullable=False) + categories = Column(ARRAY(Enum(EquipmentType)), nullable=False) + clean_name = Column(String, nullable=False) facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False) quantity = Column(Integer, nullable=True) accessibility = Column(Enum(AccessibilityType), nullable=True) - def __init__(self, **kwargs): - 
        self.id = kwargs.get("id")
-        self.name = kwargs.get("name")
-        self.equipment_type = kwargs.get("equipment_type")
-        self.facility_id = kwargs.get("facility_id")
-        self.quantity = kwargs.get("quantity")
-        self.accessibility = kwargs.get("accessibility")
+    def __init__(self, **kwargs):
+        self.id = kwargs.get("id")
+        self.name = kwargs.get("name")
+        self.categories = kwargs.get("categories")
+        self.clean_name = kwargs.get("clean_name")
+        self.facility_id = kwargs.get("facility_id")
+        self.quantity = kwargs.get("quantity")
+        self.accessibility = kwargs.get("accessibility")
diff --git a/src/scrapers/class_scraper.py b/src/scrapers/class_scraper.py
index 18ccdd7..a55a27a 100644
--- a/src/scrapers/class_scraper.py
+++ b/src/scrapers/class_scraper.py
@@ -71,8 +71,8 @@ def fetch_classes(num_pages):
 
             gym_class = create_group_class(class_href)
             if gym_class is None or not gym_class.id:
-                raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}")
-
+                raise Exception(f"Failed to create or retrieve gym class from {BASE_URL + class_href}")
+
             class_instance.class_id = gym_class.id
             date_string = row_elems[1].text.strip()
             if "Today" in date_string:
diff --git a/src/scrapers/equipment_scraper.py b/src/scrapers/equipment_scraper.py
index f55942b..5ad1cf4 100644
--- a/src/scrapers/equipment_scraper.py
+++ b/src/scrapers/equipment_scraper.py
@@ -1,5 +1,6 @@
 from bs4 import BeautifulSoup
 import requests
+import json
 from src.database import db_session
 from src.models.equipment import Equipment, EquipmentType, AccessibilityType
 from src.utils.utils import get_facility_id
@@ -7,26 +8,25 @@
 
 equip_pages = [HNH_DETAILS, NOYES_DETAILS, TEAGLE_DOWN_DETAILS, TEAGLE_UP_DETAILS, MORRISON_DETAILS]
 
+file = open('src/utils/equipment_labels.json')
+data = json.load(file)
+file.close()
 
-def categorize_equip(category):
-    if "cardio" in category.lower():
-        return EquipmentType.cardio
-    if "racks" in category.lower() or "benches" in category.lower():
-        return EquipmentType.racks_and_benches
-    if "selectorized" in category.lower():
-        return EquipmentType.selectorized
-    if "multi-cable" in category.lower():
-        return EquipmentType.multi_cable
-    if "free weights" in category.lower():
-        return EquipmentType.free_weights
-    if "miscellaneous" in category.lower():
-        return EquipmentType.miscellaneous
-    if "plate" in category.lower():
-        return EquipmentType.plate_loaded
-    return -1
-
-
-def create_equip(category, equip, fit_center_id, fit_center):
+def categorize_equip(name):
+    try:
+        cats = data[name]["label"]
+        return [EquipmentType[cat.upper().replace(" ", "_")] for cat in cats]
+    except KeyError:
+        return []  # Return empty list if no categories found
+
+def get_clean_name(name):
+    try:
+        return data[name]["clean_name"]
+    except KeyError:
+        return name
+
+
+def create_equip(equip, fit_center_id, fit_center):
     """
     Create equipment from a list of equipment.
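+
+    Each listing is matched against src/utils/equipment_labels.json to derive
+    its category labels (via categorize_equip) and display name (via
+    get_clean_name) before it is persisted.
+
+    Parameters:
+        equip: BeautifulSoup <td> cell holding one column of equipment listings.
+        fit_center_id: ID of the facility the equipment belongs to.
+        fit_center: name of the fitness center being scraped.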
""" @@ -42,34 +42,43 @@ def create_equip(category, equip, fit_center_id, fit_center): if equip_obj[0].isnumeric(): num_objs = int(equip_obj[0]) equip_obj = equip_obj[1:] - equip_obj = " ".join(equip_obj) - + # Strip leading and trailing spaces and replace non-breaking space with regular space after joining + equip_obj = ((" ".join(equip_obj)).strip()).replace(chr(160), chr(32)) + clean_name = get_clean_name(equip_obj) num_objs = None if num_objs == 0 else num_objs accessibility_option = None if "wheelchair" not in equip_obj else 1 - equip_type = categorize_equip(category) + categories = categorize_equip(equip_obj) try: existing_equip = ( db_session.query(Equipment) .filter( Equipment.name == equip_obj, - Equipment.equipment_type == equip_type, Equipment.facility_id == fit_center_id, ) .first() ) - assert existing_equip is not None - except: + if existing_equip is not None: + continue + equip_db_obj = Equipment( - name=equip_obj, - equipment_type=equip_type, + name=equip_obj.strip(), facility_id=fit_center_id, + clean_name=clean_name, quantity=num_objs, accessibility=AccessibilityType.wheelchair if accessibility_option else None, + categories=categories, ) + equip_db_objs.append(equip_db_obj) - db_session.add_all(equip_db_objs) - db_session.commit() + + except Exception as e: + print(f"Error creating equipment {equip_obj}: {str(e)}") + continue + + if equip_db_objs: + db_session.add_all(equip_db_objs) + db_session.commit() def process_equip_page(page, fit_center): @@ -87,18 +96,18 @@ def process_equip_page(page, fit_center): categories = head[row].find_all("th") equip = body[row].find_all("td") if categories[0].text: - create_equip(categories[0].text, equip[0], fit_center_id, fit_center) + create_equip(equip[0], fit_center_id, fit_center) if categories[1].text: - create_equip(categories[1].text, equip[1], fit_center_id, fit_center) + create_equip(equip[1], fit_center_id, fit_center) else: body = table.find_all("tr") for even_row in range(0, len(body), 2): categories = body[even_row].find_all("th") equip = body[even_row + 1].find_all("td") if categories[0].text: - create_equip(categories[0].text, equip[0], fit_center_id, fit_center) + create_equip(equip[0], fit_center_id, fit_center) if categories[1].text: - create_equip(categories[1].text, equip[1], fit_center_id, fit_center) + create_equip(equip[1], fit_center_id, fit_center) def scrape_equipment(): diff --git a/src/utils/equipment_labels.json b/src/utils/equipment_labels.json new file mode 100644 index 0000000..148607e --- /dev/null +++ b/src/utils/equipment_labels.json @@ -0,0 +1,326 @@ +{ + "GRIPR Sandbag 2kg-10kg": { + "clean_name": "GRIPR Sandbag 2kg-10kg", + "label": ["Miscellaneous"] + }, + "Power Lift Multi-Racks": { + "clean_name": "Power Lift Multi-Racks", + "label": ["Hamstrings", "Glutes", "Calves"] + }, + "Stability Disks": { + "clean_name": "Stability Disks", + "label": ["Abdominals"] + }, + "Expresso Upright Bike": { + "clean_name": "Expresso Upright Bike", + "label": ["Cardio"] + }, + "Precor Treadmills": { + "clean_name": "Treadmills", + "label": ["Cardio"] + }, + "Power Lift Half Racks": { + "clean_name": "Power Lift Half Racks", + "label": ["Hamstrings", "Glutes", "Calves"] + }, + "Power Lift Adjustable Benches": { + "clean_name": "Power Lift Adjustable Benches", + "label": ["Miscellaneous"] + }, + "Precor Glute Extension": { + "clean_name": "Glute Extension", + "label": ["Glutes"] + }, + "Plyometric Boxes": { + "clean_name": "Plyometric Boxes", + "label": ["Quads", "Glutes"] + }, + "Matrix Recumbent Bike": { + 
"clean_name": "Matrix Recumbent Bike", + "label": ["Cardio"] + }, + "Medicine Balls 1kg-5kg": { + "clean_name": "Medicine Balls 1kg-5kg", + "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + }, + "Power Lift Multi-Rack Benches": { + "clean_name": "Power Lift Multi-Rack Benches", + "label": ["Miscellaneous"] + }, + "Power Lift Leg Press": { + "clean_name": "Power Lift Leg Press", + "label": ["Quads", "Glutes", "Calves"] + }, + "Dumbbells 3lbs-70lbs": { + "clean_name": "Dumbbells 3lbs-70lbs", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + }, + "Medicine Balls 4kg-7kg": { + "clean_name": "Medicine Balls 4kg-7kg", + "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + }, + "Precor Pulldown": { + "clean_name": "Pulldown", + "label": ["Back", "Biceps"] + }, + "Bulgarian Bags 5kg-17kg": { + "clean_name": "Bulgarian Bags 5kg-17kg", + "label": ["Miscellaneous"] + }, + "Dumbbells 3lbs-100lbs": { + "clean_name": "Dumbbells 3lbs-100lbs", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + }, + "Precor Recumbent Bike": { + "clean_name": "Recumbent Bike", + "label": ["Cardio"] + }, + "Precor Seated Low Row": { + "clean_name": "Seated Low Row", + "label": ["Back", "Biceps"] + }, + "Precor Seated Leg Curl": { + "clean_name": "Seated Leg Curl", + "label": ["Hamstrings"] + }, + "Bar Pads": { + "clean_name": "Bar Pads", + "label": ["Miscellaneous"] + }, + "GRIPR Sandbag 2kg-16kg": { + "clean_name": "GRIPR Sandbag 2kg-16kg", + "label": ["Miscellaneous"] + }, + "Precor Shoulder Press": { + "clean_name": "Shoulder Press", + "label": ["Shoulders", "Triceps"] + }, + "Barbells 35lbs": { + "clean_name": "Barbells 35lbs", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + }, + "AB Wheels": { + "clean_name": "AB Wheels", + "label": ["Abdominals"] + }, + "Precor Tri Pushdown": { + "clean_name": "Tri Pushdown", + "label": ["Triceps"] + }, + "Matrix Upright Spin Bike": { + "clean_name": "Matrix Upright Spin Bike", + "label": ["Cardio"] + }, + "Hex Trap Bar": { + "clean_name": "Hex Trap Bar", + "label": ["Miscellaneous"] + }, + "Precor Leg Extension": { + "clean_name": "Leg Extension", + "label": ["Quads"] + }, + "Precor Rear Delt/Pec Fly": { + "clean_name": "Rear Delt/Pec Fly", + "label": ["Chest", "Shoulders", "Back"] + }, + "Precor Treadmill": { + "clean_name": "Treadmill", + "label": ["Cardio"] + }, + "Slam Balls 5kg-15kg": { + "clean_name": "Slam Balls 5kg-15kg", + "label": ["Abdominals"] + }, + "Precor Hi/Lo Pulleys": { + "clean_name": "Hi/Lo Pulleys", + "label": ["Triceps", "Biceps", "Shoulders", "Chest"] + }, + "Matrix Climb Mill": { + "clean_name": "Matrix Climb Mill", + "label": ["Cardio"] + }, + "Plyo Boxes": { + "clean_name": "Plyo Boxes", + "label": ["Quads", "Glutes"] + }, + "Matrix Treadmill": { + "clean_name": "Matrix Treadmill", + "label": ["Cardio"] + }, + "Core Bags 5kg-25kg": { + "clean_name": "Core Bags 5kg-25kg", + "label": ["Abdominals"] + }, + "Matrix Power Racks": { + "clean_name": "Matrix Power Racks", + "label": ["Miscellaneous"] + }, + "Matrix Rowing Ergometer": { + "clean_name": "Matrix Rowing Ergometer", + "label": ["Cardio", "Back"] + }, + "360 Multi-Trainer": { + "clean_name": "360 Multi-Trainer", + "label": ["Miscellaneous"] + }, + "Precor Seated Row": { + "clean_name": "Seated Row", + "label": ["Back", "Biceps"] + }, + "EZ Curl Bar": { + "clean_name": "EZ Curl Bar", + "label": ["Biceps", "Triceps"] + }, + "Precor Incline Lever Row": { + "clean_name": "Incline Lever Row", + "label": ["Back", "Biceps"] + }, + "Stretch Bands": { + 
"clean_name": "Stretch Bands", + "label": ["Miscellaneous"] + }, + "BOSU Ball": { + "clean_name": "BOSU Ball", + "label": ["Abdominals"] + }, + "Precor Elliptical": { + "clean_name": "Elliptical", + "label": ["Cardio"] + }, + "C2 Rowing Ergometer": { + "clean_name": "C2 Rowing Ergometer", + "label": ["Cardio", "Back"] + }, + "GRIPR Sandbag 2kg-12kg": { + "clean_name": "GRIPR Sandbag 2kg-12kg", + "label": ["Miscellaneous"] + }, + "Precor Inner/Outer Thigh": { + "clean_name": "Inner/Outer Thigh", + "label": ["Glutes"] + }, + "Stability Balls": { + "clean_name": "Stability Balls", + "label": ["Abdominals"] + }, + "Precor Preacher Curl": { + "clean_name": "Preacher Curl", + "label": ["Biceps"] + }, + "SciFit Total Body Bike (wheelchair accessible)": { + "clean_name": "SciFit Total Body Bike (wheelchair accessible)", + "label": ["Cardio"] + }, + "Foam Rollers": { + "clean_name": "Foam Rollers", + "label": ["Miscellaneous"] + }, + "Precor Back Extension": { + "clean_name": "Back Extension", + "label": ["Back", "Abdominals"] + }, + "Precor AMT": { + "clean_name": "AMT", + "label": ["Cardio"] + }, + "Precor Upright Bike": { + "clean_name": "Upright Bike", + "label": ["Cardio"] + }, + "Precor Rotary Torso": { + "clean_name": "Rotary Torso", + "label": ["Abdominals"] + }, + "Precor Tricep Pushdown": { + "clean_name": "Tricep Pushdown", + "label": ["Triceps"] + }, + "Power Lift Bench Press": { + "clean_name": "Power Lift Bench Press", + "label": ["Chest", "Triceps", "Shoulders"] + }, + "Power Lift Glute Ham Raise": { + "clean_name": "Power Lift Glute Ham Raise", + "label": ["Hamstrings", "Glutes"] + }, + "Precor AMTs": { + "clean_name": "AMTs", + "label": ["Cardio"] + }, + "Precor Ellipticals": { + "clean_name": "Ellipticals", + "label": ["Cardio"] + }, + "Precor Chin/Dip Assist": { + "clean_name": "Chin/Dip Assist", + "label": ["Back", "Chest", "Triceps"] + }, + "Power Lift Bench Presses": { + "clean_name": "Power Lift Bench Presses", + "label": ["Chest", "Triceps", "Shoulders"] + }, + "Power Lift Prone Leg Curl": { + "clean_name": "Power Lift Prone Leg Curl", + "label": ["Hamstrings"] + }, + "Precor Chest Press": { + "clean_name": "Chest Press", + "label": ["Chest", "Triceps", "Shoulders"] + }, + "Soft TIYR (tire) 60kg": { + "clean_name": "Soft TIYR (tire) 60kg", + "label": ["Miscellaneous"] + }, + "Precor Calf Press": { + "clean_name": "Calf Press", + "label": ["Calves"] + }, + "Power Lift Leg Extension": { + "clean_name": "Power Lift Leg Extension", + "label": ["Quads"] + }, + "Power Lift Half Racks & Platforms": { + "clean_name": "Power Lift Half Racks & Platforms", + "label": ["Miscellaneous"] + }, + "Slam Balls": { + "clean_name": "Slam Balls", + "label": ["Abdominals"] + }, + "Precor Hi/Lo Pulley": { + "clean_name": "Hi/Lo Pulley", + "label": ["Miscellaneous"] + }, + "Precor Leg Press": { + "clean_name": "Leg Press", + "label": ["Quads", "Glutes", "Calves"] + }, + "Kettlebells 8kg-32kg": { + "clean_name": "Kettlebells 8kg-32kg", + "label": ["Abdominals", "Glutes", "Biceps", "Back"] + }, + "Medicine Balls 1kg-10kg": { + "clean_name": "Medicine Balls 1kg-10kg", + "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + }, + "Precor Standing Leg Curl": { + "clean_name": "Standing Leg Curl", + "label": ["Hamstrings"] + }, + "Dumbbells 3lbs-125lbs": { + "clean_name": "Dumbbells 3lbs-125lbs", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + }, + "Precor Lat Pulldown": { + "clean_name": "Lat Pulldown", + "label": ["Back", "Biceps"] + }, + "Barbells 45lbs": { + "clean_name": "Barbells 
45lbs", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + }, + "Marpo Rope Trainer": { + "clean_name": "Marpo Rope Trainer", + "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + } +} \ No newline at end of file From 09c00cf294bb3aef592321d2d80c7cf1a19947ae Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Sun, 3 Nov 2024 21:57:55 -0500 Subject: [PATCH 02/53] Implemented migration scripts --- app.py | 9 ++++++++- manager.py | 8 ++++---- schema.graphql | 4 +++- src/database.py | 17 +++++++++++++---- src/models/user.py | 3 ++- src/schema.py | 35 +++++++++++++++++++---------------- 6 files changed, 49 insertions(+), 27 deletions(-) diff --git a/app.py b/app.py index 6fb7b04..2d05a81 100644 --- a/app.py +++ b/app.py @@ -6,6 +6,8 @@ from graphene import Schema from graphql.utils import schema_printer from src.database import db_session, init_db +from src.database import Base as db +from flask_migrate import Migrate from src.schema import Query, Mutation from src.scrapers.capacities_scraper import fetch_capacities from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility @@ -17,6 +19,7 @@ from src.utils.utils import create_gym_table from src.models.openhours import OpenHours from flasgger import Swagger +import os sentry_sdk.init( @@ -32,6 +35,11 @@ app = Flask(__name__) app.debug = True + +app.config['SQLALCHEMY_DATABASE_URI'] = f"postgresql://{os.environ.get('DB_USERNAME')}:{os.environ.get('DB_PASSWORD')}@{os.environ.get('DB_HOST')}:{os.environ.get('DB_PORT')}/{os.environ.get('DB_NAME')}" +app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False +# Initialize migrations +migrate = Migrate(app, db) schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) @@ -84,7 +92,6 @@ def scrape_capacities(): def scrape_classes(): logging.info("Scraping classes from group-fitness-classes...") - fetch_classes(10) diff --git a/manager.py b/manager.py index c38d51d..9f9a657 100644 --- a/manager.py +++ b/manager.py @@ -1,9 +1,9 @@ from flask_script import Manager -from flask_migrate import MigrateCommand -from app import app # , db +from flask_migrate import Migrate, MigrateCommand +from app import app +from src.database import Base as db -# Build manager -# migrate = Migrate(app, db) +migrate = Migrate(app, db) manager = Manager(app) manager.add_command("db", MigrateCommand) diff --git a/schema.graphql b/schema.graphql index 06c080f..85e3030 100644 --- a/schema.graphql +++ b/schema.graphql @@ -193,6 +193,7 @@ type Query { getWeeklyWorkoutDays(id: Int): [String] getWorkoutsById(id: Int): [Workout] activities: [Activity] + getAllReports: [Report] } type Report { @@ -216,10 +217,11 @@ enum ReportType { type User { id: ID! - email: String! + email: String netId: String! name: String! 
workoutGoal: [DayOfWeekEnum] + newColumn: String giveaways: [Giveaway] reports: [Report] } diff --git a/src/database.py b/src/database.py index 7b89f42..6ee43bb 100644 --- a/src/database.py +++ b/src/database.py @@ -3,12 +3,24 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker +from dotenv import load_dotenv +# Load environment variables from .env file +load_dotenv() + +# Get database credentials with logging db_user = os.environ.get("DB_USERNAME") db_password = os.environ.get("DB_PASSWORD") db_name = os.environ.get("DB_NAME") db_host = os.environ.get("DB_HOST") -db_port = os.environ.get("DB_PORT") +db_port = os.environ.get("DB_PORT", "25060") # Add default port + +# Verify all required variables are present +if not all([db_user, db_password, db_name, db_host, db_port]): + raise ValueError( + "Missing required database configuration. " + "Please ensure all database environment variables are set." + ) db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}" engine = create_engine(db_url) @@ -17,13 +29,10 @@ Base = declarative_base() Base.query = db_session.query_property() - def init_db(): """ Initialize database for Uplift. """ logging.info("Initializing database") - - # Load initial data Base.metadata.create_all(bind=engine) db_session.commit() diff --git a/src/models/user.py b/src/models/user.py index f0f6bbc..537de61 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -31,9 +31,10 @@ class User(Base): __tablename__ = "users" id = Column(Integer, primary_key=True) - email = Column(String, nullable=False) + email = Column(String, nullable=True) giveaways = relationship("Giveaway", secondary="giveaway_instance", back_populates="users") reports = relationship("Report", back_populates="user") net_id = Column(String, nullable=False) name = Column(String, nullable=False) workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) + new_column = Column(String, nullable=True) diff --git a/src/schema.py b/src/schema.py index f68cb1c..c5e860d 100644 --- a/src/schema.py +++ b/src/schema.py @@ -206,6 +206,22 @@ class Workout(SQLAlchemyObjectType): class Meta: model = WorkoutModel +# MARK: - Report + +class Report(SQLAlchemyObjectType): + class Meta: + model = ReportModel + + gym = graphene.Field(lambda: Gym) + user = graphene.Field(lambda: User) + + def resolve_gym(self, info): + query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() + return query + + def resolve_user(self, info): + query = User.get_query(info).filter(UserModel.id == self.user_id).first() + return query # MARK: - Query @@ -218,6 +234,7 @@ class Query(graphene.ObjectType): ) get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.") activities = graphene.List(Activity) + get_all_reports = graphene.List(Report, description="Get all reports.") def resolve_get_all_gyms(self, info): query = Gym.get_query(info) @@ -261,24 +278,10 @@ def resolve_get_weekly_workout_days(self, info, id): return list(workout_days_set) - -# MARK: - Report - -class Report(SQLAlchemyObjectType): - class Meta: - model = ReportModel - - gym = graphene.Field(lambda: Gym) - user = graphene.Field(lambda: User) - - def resolve_gym(self, info): - query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() + def resolve_get_all_reports(self, info): + query = ReportModel.query.all() return query - def resolve_user(self, info): - query = 
User.get_query(info).filter(UserModel.id == self.user_id).first() - return query - # MARK: - Mutation From ce2a4287875369eecadf97e5c6f6ecdb7fa94b88 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 6 Nov 2024 11:09:09 -0500 Subject: [PATCH 03/53] Implemented changes on migration based on PR --- app.py | 17 ++- schema.graphql | 32 +++--- src/database.py | 23 ++-- src/models/equipment.py | 6 +- src/scrapers/equipment_scraper.py | 31 +++--- src/utils/equipment_labels.json | 170 +++++++++++++++--------------- 6 files changed, 141 insertions(+), 138 deletions(-) diff --git a/app.py b/app.py index 2d05a81..602102c 100644 --- a/app.py +++ b/app.py @@ -18,9 +18,8 @@ from src.scrapers.activities_scraper import fetch_activity from src.utils.utils import create_gym_table from src.models.openhours import OpenHours +from src.database import db_url, db_user, db_password, db_name, db_host, db_port from flasgger import Swagger -import os - sentry_sdk.init( dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560", @@ -36,10 +35,16 @@ app = Flask(__name__) app.debug = True -app.config['SQLALCHEMY_DATABASE_URI'] = f"postgresql://{os.environ.get('DB_USERNAME')}:{os.environ.get('DB_PASSWORD')}@{os.environ.get('DB_HOST')}:{os.environ.get('DB_PORT')}/{os.environ.get('DB_NAME')}" +# Verify all required variables are present +if not all([db_user, db_password, db_name, db_host, db_port]): + raise ValueError( + "Missing required database configuration. " + "Please ensure all database environment variables are set." + ) + +app.config['SQLALCHEMY_DATABASE_URI'] = db_url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False -# Initialize migrations -migrate = Migrate(app, db) + schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) @@ -98,6 +103,7 @@ def scrape_classes(): # Create database and fill it with data init_db() create_gym_table() + scrape_classes() scrape_hours() scrape_capacities() @@ -105,6 +111,7 @@ def scrape_classes(): logging.info("Scraping activities from sheets...") fetch_activity() + # Create schema.graphql with open("schema.graphql", "w+") as schema_file: schema_file.write(schema_printer.print_schema(schema)) diff --git a/schema.graphql b/schema.graphql index 85e3030..024bc43 100644 --- a/schema.graphql +++ b/schema.graphql @@ -84,28 +84,13 @@ enum DayOfWeekEnum { type Equipment { id: ID! name: String! - categories: [EquipmentType]! + muscleGroups: [MuscleGroup]! cleanName: String! facilityId: Int! quantity: Int accessibility: AccessibilityType } -enum EquipmentType { - ABDOMINALS - CHEST - BACK - SHOULDERS - BICEPS - TRICEPS - HAMSTRINGS - QUADS - GLUTES - CALVES - MISCELLANEOUS - CARDIO -} - type Facility { id: ID! facilityType: FacilityType! 
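
For context, the renamed GraphQL fields mirror the SQLAlchemy model one-to-one.
A minimal sketch of constructing a row under the new model (not part of this
diff; assumes a configured db_session, and the facility id and quantity are
illustrative values):

    from src.database import db_session
    from src.models.equipment import Equipment, MuscleGroup

    row = Equipment(
        name="Precor Lat Pulldown",
        clean_name="Lat Pulldown",
        muscle_groups=[MuscleGroup.BACK, MuscleGroup.BICEPS],
        facility_id=1,  # illustrative facility id
        quantity=4,     # illustrative count
    )
    db_session.add(row)
    db_session.commit()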
@@ -152,6 +137,21 @@ type Gym { reports: [Report] } +enum MuscleGroup { + ABDOMINALS + CHEST + BACK + SHOULDERS + BICEPS + TRICEPS + HAMSTRINGS + QUADS + GLUTES + CALVES + MISCELLANEOUS + CARDIO +} + type Mutation { createGiveaway(name: String!): Giveaway createUser(email: String!, name: String!, netId: String!): User diff --git a/src/database.py b/src/database.py index 6ee43bb..2a9e5fe 100644 --- a/src/database.py +++ b/src/database.py @@ -3,26 +3,19 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -from dotenv import load_dotenv +import dotenv -# Load environment variables from .env file -load_dotenv() +dotenv.load_dotenv() # Get database credentials with logging -db_user = os.environ.get("DB_USERNAME") -db_password = os.environ.get("DB_PASSWORD") -db_name = os.environ.get("DB_NAME") -db_host = os.environ.get("DB_HOST") -db_port = os.environ.get("DB_PORT", "25060") # Add default port +db_user = os.getenv("DB_USERNAME") +db_password = os.getenv("DB_PASSWORD") +db_name = os.getenv("DB_NAME") +db_host = os.getenv("DB_HOST") +db_port = os.getenv("DB_PORT", "5432") # Add default port +db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}" -# Verify all required variables are present -if not all([db_user, db_password, db_name, db_host, db_port]): - raise ValueError( - "Missing required database configuration. " - "Please ensure all database environment variables are set." - ) -db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}" engine = create_engine(db_url) db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) diff --git a/src/models/equipment.py b/src/models/equipment.py index d4d0a99..dbbefab 100644 --- a/src/models/equipment.py +++ b/src/models/equipment.py @@ -4,7 +4,7 @@ from src.database import Base -class EquipmentType(enum.Enum): +class MuscleGroup(enum.Enum): ABDOMINALS = 1 # Core/Ab exercises CHEST = 2 # Chest exercises BACK = 3 # Back exercises @@ -29,7 +29,7 @@ class Equipment(Base): id = Column(Integer, primary_key=True) name = Column(String, nullable=False) - categories = Column(ARRAY(Enum(EquipmentType)), nullable=False) + muscle_groups = Column(ARRAY(Enum(MuscleGroup)), nullable=False) clean_name = Column(String, nullable=False) facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False) quantity = Column(Integer, nullable=True) @@ -38,7 +38,7 @@ class Equipment(Base): def __init__(self, **kwargs): self.id = kwargs.get("id") self.name = kwargs.get("name") - self.categories = kwargs.get("categories") + self.muscle_groups = kwargs.get("muscle_groups") self.facility_id = kwargs.get("facility_id") self.quantity = kwargs.get("quantity") self.accessibility = kwargs.get("accessibility") diff --git a/src/scrapers/equipment_scraper.py b/src/scrapers/equipment_scraper.py index 5ad1cf4..c7e2b28 100644 --- a/src/scrapers/equipment_scraper.py +++ b/src/scrapers/equipment_scraper.py @@ -2,22 +2,25 @@ import requests import json from src.database import db_session -from src.models.equipment import Equipment, EquipmentType, AccessibilityType +from src.models.equipment import Equipment, MuscleGroup, AccessibilityType from src.utils.utils import get_facility_id from src.utils.constants import HNH_DETAILS, NOYES_DETAILS, TEAGLE_DOWN_DETAILS, TEAGLE_UP_DETAILS, MORRISON_DETAILS equip_pages = [HNH_DETAILS, NOYES_DETAILS, TEAGLE_DOWN_DETAILS, TEAGLE_UP_DETAILS, MORRISON_DETAILS] -file = 
open('src/utils/equipment_labels.json') -data = json.load(file) -file.close() +try: + # Load equipment labels from JSON file + with open('src/utils/equipment_labels.json') as file: + data = json.load(file) +except Exception as e: + raise RuntimeError(f"Failed to load equipment labels: {str(e)}") def categorize_equip(name): try: cats = data[name]["label"] - return [EquipmentType[cat.upper().replace(" ", "_")] for cat in cats] + return [MuscleGroup[cat.replace(" ", "_")] for cat in cats] except KeyError: - return [] # Return empty list if no categories found + return [] # Return empty list if no muscle groups found def get_clean_name(name): try: @@ -47,7 +50,7 @@ def create_equip(equip, fit_center_id, fit_center): clean_name = get_clean_name(equip_obj) num_objs = None if num_objs == 0 else num_objs accessibility_option = None if "wheelchair" not in equip_obj else 1 - categories = categorize_equip(equip_obj) + muscle_groups = categorize_equip(equip_obj) try: existing_equip = ( @@ -67,7 +70,7 @@ def create_equip(equip, fit_center_id, fit_center): clean_name=clean_name, quantity=num_objs, accessibility=AccessibilityType.wheelchair if accessibility_option else None, - categories=categories, + muscle_groups=muscle_groups, ) equip_db_objs.append(equip_db_obj) @@ -93,20 +96,20 @@ def process_equip_page(page, fit_center): head = table.find("thead").find_all("tr") body = table.find("tbody").find_all("tr") for row in range(len(head)): - categories = head[row].find_all("th") + muscle_groups = head[row].find_all("th") equip = body[row].find_all("td") - if categories[0].text: + if muscle_groups[0].text: create_equip(equip[0], fit_center_id, fit_center) - if categories[1].text: + if muscle_groups[1].text: create_equip(equip[1], fit_center_id, fit_center) else: body = table.find_all("tr") for even_row in range(0, len(body), 2): - categories = body[even_row].find_all("th") + muscle_groups = body[even_row].find_all("th") equip = body[even_row + 1].find_all("td") - if categories[0].text: + if muscle_groups[0].text: create_equip(equip[0], fit_center_id, fit_center) - if categories[1].text: + if muscle_groups[1].text: create_equip(equip[1], fit_center_id, fit_center) diff --git a/src/utils/equipment_labels.json b/src/utils/equipment_labels.json index 148607e..7f3d3bd 100644 --- a/src/utils/equipment_labels.json +++ b/src/utils/equipment_labels.json @@ -1,326 +1,326 @@ { "GRIPR Sandbag 2kg-10kg": { "clean_name": "GRIPR Sandbag 2kg-10kg", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Power Lift Multi-Racks": { "clean_name": "Power Lift Multi-Racks", - "label": ["Hamstrings", "Glutes", "Calves"] + "label": ["HAMSTRINGS", "GLUTES", "CALVES"] }, "Stability Disks": { "clean_name": "Stability Disks", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Expresso Upright Bike": { "clean_name": "Expresso Upright Bike", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Treadmills": { "clean_name": "Treadmills", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Power Lift Half Racks": { "clean_name": "Power Lift Half Racks", - "label": ["Hamstrings", "Glutes", "Calves"] + "label": ["HAMSTRINGS", "GLUTES", "CALVES"] }, "Power Lift Adjustable Benches": { "clean_name": "Power Lift Adjustable Benches", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Glute Extension": { "clean_name": "Glute Extension", - "label": ["Glutes"] + "label": ["GLUTES"] }, "Plyometric Boxes": { "clean_name": "Plyometric Boxes", - "label": ["Quads", "Glutes"] + "label": ["QUADS", "GLUTES"] }, "Matrix Recumbent 
Bike": { "clean_name": "Matrix Recumbent Bike", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Medicine Balls 1kg-5kg": { "clean_name": "Medicine Balls 1kg-5kg", - "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] }, "Power Lift Multi-Rack Benches": { "clean_name": "Power Lift Multi-Rack Benches", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Power Lift Leg Press": { "clean_name": "Power Lift Leg Press", - "label": ["Quads", "Glutes", "Calves"] + "label": ["QUADS", "GLUTES", "CALVES"] }, "Dumbbells 3lbs-70lbs": { "clean_name": "Dumbbells 3lbs-70lbs", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] }, "Medicine Balls 4kg-7kg": { "clean_name": "Medicine Balls 4kg-7kg", - "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] }, "Precor Pulldown": { "clean_name": "Pulldown", - "label": ["Back", "Biceps"] + "label": ["BACK", "BICEPS"] }, "Bulgarian Bags 5kg-17kg": { "clean_name": "Bulgarian Bags 5kg-17kg", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Dumbbells 3lbs-100lbs": { "clean_name": "Dumbbells 3lbs-100lbs", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] }, "Precor Recumbent Bike": { "clean_name": "Recumbent Bike", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Seated Low Row": { "clean_name": "Seated Low Row", - "label": ["Back", "Biceps"] + "label": ["BACK", "BICEPS"] }, "Precor Seated Leg Curl": { "clean_name": "Seated Leg Curl", - "label": ["Hamstrings"] + "label": ["HAMSTRINGS"] }, "Bar Pads": { "clean_name": "Bar Pads", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "GRIPR Sandbag 2kg-16kg": { "clean_name": "GRIPR Sandbag 2kg-16kg", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Shoulder Press": { "clean_name": "Shoulder Press", - "label": ["Shoulders", "Triceps"] + "label": ["SHOULDERS", "TRICEPS"] }, "Barbells 35lbs": { "clean_name": "Barbells 35lbs", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] }, "AB Wheels": { "clean_name": "AB Wheels", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Tri Pushdown": { "clean_name": "Tri Pushdown", - "label": ["Triceps"] + "label": ["TRICEPS"] }, "Matrix Upright Spin Bike": { "clean_name": "Matrix Upright Spin Bike", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Hex Trap Bar": { "clean_name": "Hex Trap Bar", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Leg Extension": { "clean_name": "Leg Extension", - "label": ["Quads"] + "label": ["QUADS"] }, "Precor Rear Delt/Pec Fly": { "clean_name": "Rear Delt/Pec Fly", - "label": ["Chest", "Shoulders", "Back"] + "label": ["CHEST", "SHOULDERS", "BACK"] }, "Precor Treadmill": { "clean_name": "Treadmill", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Slam Balls 5kg-15kg": { "clean_name": "Slam Balls 5kg-15kg", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Hi/Lo Pulleys": { "clean_name": "Hi/Lo Pulleys", - "label": ["Triceps", "Biceps", "Shoulders", "Chest"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST"] }, "Matrix Climb Mill": { "clean_name": "Matrix Climb Mill", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Plyo Boxes": { "clean_name": "Plyo Boxes", - "label": ["Quads", "Glutes"] + "label": ["QUADS", "GLUTES"] }, 
"Matrix Treadmill": { "clean_name": "Matrix Treadmill", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Core Bags 5kg-25kg": { "clean_name": "Core Bags 5kg-25kg", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Matrix Power Racks": { "clean_name": "Matrix Power Racks", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Matrix Rowing Ergometer": { "clean_name": "Matrix Rowing Ergometer", - "label": ["Cardio", "Back"] + "label": ["CARDIO", "BACK"] }, "360 Multi-Trainer": { "clean_name": "360 Multi-Trainer", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Seated Row": { "clean_name": "Seated Row", - "label": ["Back", "Biceps"] + "label": ["BACK", "BICEPS"] }, "EZ Curl Bar": { "clean_name": "EZ Curl Bar", - "label": ["Biceps", "Triceps"] + "label": ["BICEPS", "TRICEPS"] }, "Precor Incline Lever Row": { "clean_name": "Incline Lever Row", - "label": ["Back", "Biceps"] + "label": ["BACK", "BICEPS"] }, "Stretch Bands": { "clean_name": "Stretch Bands", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "BOSU Ball": { "clean_name": "BOSU Ball", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Elliptical": { "clean_name": "Elliptical", - "label": ["Cardio"] + "label": ["CARDIO"] }, "C2 Rowing Ergometer": { "clean_name": "C2 Rowing Ergometer", - "label": ["Cardio", "Back"] + "label": ["CARDIO", "BACK"] }, "GRIPR Sandbag 2kg-12kg": { "clean_name": "GRIPR Sandbag 2kg-12kg", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Inner/Outer Thigh": { "clean_name": "Inner/Outer Thigh", - "label": ["Glutes"] + "label": ["GLUTES"] }, "Stability Balls": { "clean_name": "Stability Balls", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Preacher Curl": { "clean_name": "Preacher Curl", - "label": ["Biceps"] + "label": ["BICEPS"] }, "SciFit Total Body Bike (wheelchair accessible)": { "clean_name": "SciFit Total Body Bike (wheelchair accessible)", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Foam Rollers": { "clean_name": "Foam Rollers", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, - "Precor Back Extension": { - "clean_name": "Back Extension", - "label": ["Back", "Abdominals"] + "Precor BACK Extension": { + "clean_name": "BACK Extension", + "label": ["BACK", "ABDOMINALS"] }, "Precor AMT": { "clean_name": "AMT", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Upright Bike": { "clean_name": "Upright Bike", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Rotary Torso": { "clean_name": "Rotary Torso", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Tricep Pushdown": { "clean_name": "Tricep Pushdown", - "label": ["Triceps"] + "label": ["TRICEPS"] }, "Power Lift Bench Press": { "clean_name": "Power Lift Bench Press", - "label": ["Chest", "Triceps", "Shoulders"] + "label": ["CHEST", "TRICEPS", "SHOULDERS"] }, "Power Lift Glute Ham Raise": { "clean_name": "Power Lift Glute Ham Raise", - "label": ["Hamstrings", "Glutes"] + "label": ["HAMSTRINGS", "GLUTES"] }, "Precor AMTs": { "clean_name": "AMTs", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Ellipticals": { "clean_name": "Ellipticals", - "label": ["Cardio"] + "label": ["CARDIO"] }, "Precor Chin/Dip Assist": { "clean_name": "Chin/Dip Assist", - "label": ["Back", "Chest", "Triceps"] + "label": ["BACK", "CHEST", "TRICEPS"] }, "Power Lift Bench Presses": { "clean_name": "Power Lift Bench Presses", - "label": ["Chest", "Triceps", "Shoulders"] + "label": ["CHEST", "TRICEPS", "SHOULDERS"] }, "Power Lift Prone Leg Curl": { 
"clean_name": "Power Lift Prone Leg Curl", - "label": ["Hamstrings"] + "label": ["HAMSTRINGS"] }, - "Precor Chest Press": { - "clean_name": "Chest Press", - "label": ["Chest", "Triceps", "Shoulders"] + "Precor CHEST Press": { + "clean_name": "CHEST Press", + "label": ["CHEST", "TRICEPS", "SHOULDERS"] }, "Soft TIYR (tire) 60kg": { "clean_name": "Soft TIYR (tire) 60kg", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Calf Press": { "clean_name": "Calf Press", - "label": ["Calves"] + "label": ["CALVES"] }, "Power Lift Leg Extension": { "clean_name": "Power Lift Leg Extension", - "label": ["Quads"] + "label": ["QUADS"] }, "Power Lift Half Racks & Platforms": { "clean_name": "Power Lift Half Racks & Platforms", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Slam Balls": { "clean_name": "Slam Balls", - "label": ["Abdominals"] + "label": ["ABDOMINALS"] }, "Precor Hi/Lo Pulley": { "clean_name": "Hi/Lo Pulley", - "label": ["Miscellaneous"] + "label": ["MISCELLANEOUS"] }, "Precor Leg Press": { "clean_name": "Leg Press", - "label": ["Quads", "Glutes", "Calves"] + "label": ["QUADS", "GLUTES", "CALVES"] }, "Kettlebells 8kg-32kg": { "clean_name": "Kettlebells 8kg-32kg", - "label": ["Abdominals", "Glutes", "Biceps", "Back"] + "label": ["ABDOMINALS", "GLUTES", "BICEPS", "BACK"] }, "Medicine Balls 1kg-10kg": { "clean_name": "Medicine Balls 1kg-10kg", - "label": ["Glutes", "Back", "Abdominals", "Shoulders"] + "label": ["GLUTES", "BACK", "ABDOMINALS", "SHOULDERS"] }, "Precor Standing Leg Curl": { "clean_name": "Standing Leg Curl", - "label": ["Hamstrings"] + "label": ["HAMSTRINGS"] }, "Dumbbells 3lbs-125lbs": { "clean_name": "Dumbbells 3lbs-125lbs", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] }, "Precor Lat Pulldown": { "clean_name": "Lat Pulldown", - "label": ["Back", "Biceps"] + "label": ["BACK", "BICEPS"] }, "Barbells 45lbs": { "clean_name": "Barbells 45lbs", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] }, "Marpo Rope Trainer": { "clean_name": "Marpo Rope Trainer", - "label": ["Triceps", "Biceps", "Shoulders", "Chest", "Back"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST", "BACK"] } } \ No newline at end of file From 83762e372b746d9303e0dd789f39f312393af36b Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 6 Nov 2024 11:11:15 -0500 Subject: [PATCH 04/53] Removed test column in users for migration --- schema.graphql | 9 +++++++-- src/models/user.py | 1 - 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/schema.graphql b/schema.graphql index 024bc43..e9fcc63 100644 --- a/schema.graphql +++ b/schema.graphql @@ -158,7 +158,13 @@ type Mutation { enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(userId: Int!, workoutTime: DateTime!): Workout - createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!, userId: Int!): CreateReport + createReport( + createdAt: DateTime! + description: String! + gymId: Int! + issue: String! + userId: Int! + ): CreateReport } type OpenHours { @@ -221,7 +227,6 @@ type User { netId: String! name: String! 
workoutGoal: [DayOfWeekEnum] - newColumn: String giveaways: [Giveaway] reports: [Report] } diff --git a/src/models/user.py b/src/models/user.py index 537de61..20609d3 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -37,4 +37,3 @@ class User(Base): net_id = Column(String, nullable=False) name = Column(String, nullable=False) workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) - new_column = Column(String, nullable=True) From d94a98fb15204a6de82a00716aa67033514fdcce Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 6 Nov 2024 11:13:37 -0500 Subject: [PATCH 05/53] Removed dotenv usage --- src/database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/database.py b/src/database.py index 2a9e5fe..23d7c36 100644 --- a/src/database.py +++ b/src/database.py @@ -3,9 +3,9 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -import dotenv +# import dotenv -dotenv.load_dotenv() +# dotenv.load_dotenv() # Get database credentials with logging db_user = os.getenv("DB_USERNAME") From b7ab023d15d6af809c93289e62d6124131e11a66 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Thu, 7 Nov 2024 21:03:19 -0500 Subject: [PATCH 06/53] Adding migration --- .gitignore | 1 - Dockerfile | 1 + app.db | Bin 0 -> 36864 bytes app.py | 2 + manager.py | 11 -- migrations/README | 1 + migrations/alembic.ini | 45 +++++++++ migrations/env.py | 95 ++++++++++++++++++ migrations/script.py.mako | 24 +++++ .../f711f3c11324_initial_migration.py | 32 ++++++ schema.graphql | 8 +- src/database.py | 1 - 12 files changed, 201 insertions(+), 20 deletions(-) create mode 100644 app.db delete mode 100644 manager.py create mode 100644 migrations/README create mode 100644 migrations/alembic.ini create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/f711f3c11324_initial_migration.py diff --git a/.gitignore b/.gitignore index 8ff10ce..e78072a 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,6 @@ build/ *.DS_Store .env .envrc -migrations .vscode Archive scripts diff --git a/Dockerfile b/Dockerfile index ec3e3b0..4da2df0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,4 +6,5 @@ COPY . . 
ENV MAX_CONCURRENT_PIP=4 RUN pip3 install --upgrade pip RUN pip3 install --exists-action w -r requirements.txt +RUN flask db upgrade CMD python3 app.py diff --git a/app.db b/app.db new file mode 100644 index 0000000000000000000000000000000000000000..d13a36bd5ea1e2c3da5371b30b0d968e9507a67d GIT binary patch literal 36864 zcmeI&!EVz^6o6s7Ep-xTgT%rMgqRIdq5`7nsteE!kqd>8Qa6BDS<_^QED|S;ZAx`@ z7wP4tcq^7HnYa#$LsJAx>$l?A9>+81d~=#=)E*r+{YZ%q!|^~yV#l~?n5OZM5QbqC z^j_5aY_s&q+U!Cf&AIbsrv>BTyB8bf??$0?Vw5+^UpN2VG&dgHIK5ue4QvP?fB*sr zAbdedy)7o}qFY~U@q6`hTgcw2WH&+Bewx0-qEwY&eS?!FfRcdOH`_6}TU z|5l0 zd&Y99(dlsfaTrHlZzwZ;Ev8zkP7beG_N`my|HUL~T@(7Xo{2cAlcn`4yWO$2y)-_5 zAbZM-$C()VQqNA@RbsDsP}g-oSNr{8&|7wIX|BKK^m)y)@7ysbjf<50PfDK3vtD|< zmQ5~gy<$fGVB|@8HK~Egf636Dz6^D>>$kPcph?r;jM!CSXY7w6e;8Z}{;S2LYFhTz zmU(_Lzp}3e$9~8Aq>EE`vT(VQiE`oQG~V^D*4pmo%C>-xJ*EcD=w zK6XSV#JT>erRw9Ab^CD3H2k2eK8Gjzhg^9wj)uv#x7gYCLc^;5MKfLB8yk8z%ij$B zVnYA{1Q0*~0R#|0009ILKmdW92yB`w_qJ=twVke1wcXuXN$17-KPQt*8VDeO00Iag zfB*srAbr$7F~A0tg_0 z00IagfB*srAb Date: Sun, 10 Nov 2024 16:26:55 -0500 Subject: [PATCH 07/53] Migration: moved scrapers under condition --- .github/workflows/deploy-dev.yml | 3 +- app.py | 85 +++++++++---------- migrations/env.py | 2 +- .../f711f3c11324_initial_migration.py | 12 ++- 4 files changed, 51 insertions(+), 51 deletions(-) diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml index a159ac8..3761857 100644 --- a/.github/workflows/deploy-dev.yml +++ b/.github/workflows/deploy-dev.yml @@ -39,11 +39,10 @@ jobs: touch tags source tags export IMAGE_TAG=${{ steps.vars.outputs.sha_short }} - echo "export IMAGE_TAG=${IMAGE_TAG}" > tags cd docker-compose docker stack rm the-stack sleep 20s sudo systemctl stop nginx sudo systemctl restart nginx docker stack deploy -c docker-compose.yml the-stack --with-registry-auth - yes | docker system prune -a \ No newline at end of file + yes | docker system prune -a diff --git a/app.py b/app.py index ff0e765..0b2ec16 100644 --- a/app.py +++ b/app.py @@ -9,13 +9,6 @@ from src.database import Base as db from flask_migrate import Migrate from src.schema import Query, Mutation -from src.scrapers.capacities_scraper import fetch_capacities -from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility -from src.scrapers.scraper_helpers import clean_past_hours -from src.scrapers.sp_hours_scraper import fetch_sp_facility -from src.scrapers.equipment_scraper import scrape_equipment -from src.scrapers.class_scraper import fetch_classes -from src.scrapers.activities_scraper import fetch_activity from src.utils.utils import create_gym_table from src.models.openhours import OpenHours from src.database import db_url, db_user, db_password, db_name, db_host, db_port @@ -45,8 +38,6 @@ app.config['SQLALCHEMY_DATABASE_URI'] = db_url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False -migrate = Migrate(app, db) - schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) @@ -72,52 +63,58 @@ def shutdown_session(exception=None): db_session.remove() -# Scrape hours every 15 minutes -@scheduler.task("interval", id="scrape_hours", seconds=900) -def scrape_hours(): - logging.info("Scraping hours from sheets...") - - # Clear hours - db_session.query(OpenHours).delete() +# Create schema.graphql +with open("schema.graphql", "w+") as schema_file: + schema_file.write(schema_printer.print_schema(schema)) + schema_file.close() - fetch_reg_facility() - fetch_reg_building() - fetch_sp_facility() - clean_past_hours() +if __name__ == "__main__": + from 
src.scrapers.capacities_scraper import fetch_capacities + from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility + from src.scrapers.scraper_helpers import clean_past_hours + from src.scrapers.sp_hours_scraper import fetch_sp_facility + from src.scrapers.equipment_scraper import scrape_equipment + from src.scrapers.class_scraper import fetch_classes + from src.scrapers.activities_scraper import fetch_activity + # Scrape hours every 15 minutes + @scheduler.task("interval", id="scrape_hours", seconds=900) + def scrape_hours(): + logging.info("Scraping hours from sheets...") -# Scrape capacities every 10 minutes -@scheduler.task("interval", id="scrape_capacities", seconds=600) -def scrape_capacities(): - logging.info("Scraping capacities from C2C...") + # Clear hours + db_session.query(OpenHours).delete() - fetch_capacities() + fetch_reg_facility() + fetch_reg_building() + fetch_sp_facility() + clean_past_hours() + # Scrape capacities every 10 minutes -# Scrape classes every hour -@scheduler.task("interval", id="scrape_classes", seconds=3600) -def scrape_classes(): - logging.info("Scraping classes from group-fitness-classes...") + @scheduler.task("interval", id="scrape_capacities", seconds=600) + def scrape_capacities(): + logging.info("Scraping capacities from C2C...") - fetch_classes(10) + fetch_capacities() + # Scrape classes every hour -# Create database and fill it with data -init_db() -create_gym_table() + @scheduler.task("interval", id="scrape_classes", seconds=3600) + def scrape_classes(): + logging.info("Scraping classes from group-fitness-classes...") -scrape_classes() -scrape_hours() -scrape_capacities() -scrape_equipment() + fetch_classes(10) -logging.info("Scraping activities from sheets...") -fetch_activity() + # Create database and fill it with data + init_db() + create_gym_table() -# Create schema.graphql -with open("schema.graphql", "w+") as schema_file: - schema_file.write(schema_printer.print_schema(schema)) - schema_file.close() + scrape_classes() + scrape_hours() + scrape_capacities() + scrape_equipment() + logging.info("Scraping activities from sheets...") + fetch_activity() -if __name__ == "__main__": app.run(host="127.0.0.1", port=5000) diff --git a/migrations/env.py b/migrations/env.py index 169d487..6b3bfa6 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,4 +1,5 @@ from __future__ import with_statement +from flask import current_app import logging from logging.config import fileConfig @@ -21,7 +22,6 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from flask import current_app config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) target_metadata = current_app.extensions['migrate'].db.metadata diff --git a/migrations/versions/f711f3c11324_initial_migration.py b/migrations/versions/f711f3c11324_initial_migration.py index 949a2e3..3fc728e 100644 --- a/migrations/versions/f711f3c11324_initial_migration.py +++ b/migrations/versions/f711f3c11324_initial_migration.py @@ -19,14 +19,18 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.alter_column('gear', 'cost', - existing_type=postgresql.DOUBLE_PRECISION(precision=53), - nullable=0) + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=0) + op.alter_column('equipment', 'categories', + new_column_name='muscle_groups') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.alter_column('gear', 'cost', - existing_type=postgresql.DOUBLE_PRECISION(precision=53), - nullable=False) + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=False) + op.alter_column('equipment', 'muscle_groups', + new_column_name='categories') # ### end Alembic commands ### From 2d73670b2431db8b222b3fb04c7fb3083c966e4a Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Sun, 10 Nov 2024 16:44:37 -0500 Subject: [PATCH 08/53] Added migrate to app.py --- app.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app.py b/app.py index 0b2ec16..545a755 100644 --- a/app.py +++ b/app.py @@ -37,7 +37,7 @@ app.config['SQLALCHEMY_DATABASE_URI'] = db_url app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - +migrate = Migrate(app, db) schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) @@ -69,6 +69,7 @@ def shutdown_session(exception=None): schema_file.close() if __name__ == "__main__": + print("Starting app...") from src.scrapers.capacities_scraper import fetch_capacities from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility from src.scrapers.scraper_helpers import clean_past_hours From 404e39c8ef40657f277f0fa3edf015d5dbc05222 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 13 Nov 2024 17:57:31 -0500 Subject: [PATCH 09/53] Implemented new migration script --- ...70_add_muscle_groups_and_clean_name_to_.py | 105 ++++++++++++++++++ .../f711f3c11324_initial_migration.py | 36 ------ src/models/user.py | 1 + 3 files changed, 106 insertions(+), 36 deletions(-) create mode 100644 migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py delete mode 100644 migrations/versions/f711f3c11324_initial_migration.py diff --git a/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py b/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py new file mode 100644 index 0000000..d63c007 --- /dev/null +++ b/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py @@ -0,0 +1,105 @@ +"""add muscle groups and clean name to equipment + +Revision ID: 79fec8ab4c70 +Revises: f711f3c11324 +Create Date: 2024-11-13 17:48:46.093175 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +from sqlalchemy import Enum + + +# revision identifiers, used by Alembic. 
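+# NOTE: down_revision is None because the previous head
+# (f711f3c11324_initial_migration.py) is deleted in this same commit,
+# so this revision starts a fresh migration chain.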
+revision = '79fec8ab4c70'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+# Reference values for the enums this migration creates. These are plain
+# classes for documentation only: sqlalchemy's Enum is a column type, not a
+# base class for Python enums.
+class MuscleGroup:
+    ABDOMINALS = 1
+    CHEST = 2
+    BACK = 3
+    SHOULDERS = 4
+    BICEPS = 5
+    TRICEPS = 6
+    HAMSTRINGS = 7
+    QUADS = 8
+    GLUTES = 9
+    CALVES = 10
+    MISCELLANEOUS = 11
+    CARDIO = 12
+
+class AccessibilityType:
+    wheelchair = 0
+
+def upgrade():
+    # First, delete all existing equipment data
+    op.execute('TRUNCATE TABLE equipment CASCADE')
+
+    # Drop the old enum type
+    op.execute('DROP TYPE IF EXISTS equipmenttype')
+
+    # Create new enum types
+    muscle_group_enum = postgresql.ENUM(
+        'ABDOMINALS', 'CHEST', 'BACK', 'SHOULDERS', 'BICEPS', 'TRICEPS',
+        'HAMSTRINGS', 'QUADS', 'GLUTES', 'CALVES', 'MISCELLANEOUS', 'CARDIO',
+        name='musclegroup'
+    )
+    muscle_group_enum.create(op.get_bind())
+
+    new_accessibility_enum = postgresql.ENUM('wheelchair', name='accessibilitytype_new')
+    new_accessibility_enum.create(op.get_bind())
+
+    # Drop old columns
+    op.drop_column('equipment', 'equipment_type')
+    op.drop_column('equipment', 'accessibility')
+
+    # Add new columns
+    op.add_column('equipment', sa.Column('clean_name', sa.String(), nullable=False))
+    op.add_column('equipment',
+        sa.Column('muscle_groups', postgresql.ARRAY(muscle_group_enum), nullable=True)
+    )
+    op.add_column('equipment',
+        sa.Column('accessibility', new_accessibility_enum, nullable=True)
+    )
+
+def downgrade():
+    # First, delete all equipment data
+    op.execute('TRUNCATE TABLE equipment CASCADE')
+
+    # Create old equipment_type enum
+    old_equipment_type_enum = postgresql.ENUM(
+        'cardio',
+        'racks_and_benches',
+        'selectorized',
+        'multi_cable',
+        'free_weights',
+        'miscellaneous',
+        'plate_loaded',
+        name='equipmenttype'
+    )
+    old_equipment_type_enum.create(op.get_bind())
+
+    # Drop new columns
+    op.drop_column('equipment', 'clean_name')
+    op.drop_column('equipment', 'muscle_groups')
+    op.drop_column('equipment', 'accessibility')
+
+    # Add back old columns
+    op.add_column('equipment',
+        sa.Column('equipment_type', old_equipment_type_enum, nullable=False)
+    )
+
+    # Drop new enums
+    postgresql.ENUM(name='musclegroup').drop(op.get_bind())
+    postgresql.ENUM(name='accessibilitytype_new').drop(op.get_bind(), cascade=True)
+
+    # Recreate old accessibility enum and column; upgrade() never dropped the
+    # old type, so create it only if it is missing.
+    old_accessibility_enum = postgresql.ENUM('wheelchair', name='accessibilitytype')
+    old_accessibility_enum.create(op.get_bind(), checkfirst=True)
+    op.add_column('equipment',
+        sa.Column('accessibility', old_accessibility_enum, nullable=True)
+    )
diff --git a/migrations/versions/f711f3c11324_initial_migration.py b/migrations/versions/f711f3c11324_initial_migration.py
deleted file mode 100644
index 3fc728e..0000000
--- a/migrations/versions/f711f3c11324_initial_migration.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Initial migration
-
-Revision ID: f711f3c11324
-Revises:
-Create Date: 2024-11-07 21:01:33.168047
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = 'f711f3c11324'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=0)
-    op.alter_column('equipment', 'categories',
-                    new_column_name='muscle_groups')
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust!
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-    op.alter_column('equipment', 'muscle_groups',
-                    new_column_name='categories')
-    # ### end Alembic commands ###

diff --git a/src/models/user.py b/src/models/user.py
index 20609d3..a442f30 100644
--- a/src/models/user.py
+++ b/src/models/user.py
@@ -24,6 +24,7 @@ class User(Base):
     - `email` The user's email address.
     - `giveaways` (nullable) The list of giveaways a user is entered into.
     - `net_id` The user's Net ID.
+    - `reports` The list of reports a user has submitted.
     - `name` The user's name.
     - `workout_goal` The days of the week the user has set as their personal goal.
     """

From f7941eb30c748645c9e1e6fb7eb0140d016c2ae3 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Sun, 17 Nov 2024 14:25:00 -0500
Subject: [PATCH 10/53] Moved import lines that depend on constants.py into main

---
 app.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app.py b/app.py
index 545a755..056f397 100644
--- a/app.py
+++ b/app.py
@@ -9,8 +9,6 @@
 from src.database import Base as db
 from flask_migrate import Migrate
 from src.schema import Query, Mutation
-from src.utils.utils import create_gym_table
-from src.models.openhours import OpenHours
 from src.database import db_url, db_user, db_password, db_name, db_host, db_port
 from flasgger import Swagger

@@ -77,6 +75,8 @@ def shutdown_session(exception=None):
     from src.scrapers.equipment_scraper import scrape_equipment
     from src.scrapers.class_scraper import fetch_classes
     from src.scrapers.activities_scraper import fetch_activity
+    from src.utils.utils import create_gym_table
+    from src.models.openhours import OpenHours

     # Scrape hours every 15 minutes
     @scheduler.task("interval", id="scrape_hours", seconds=900)

From 9cb88ecd9b74eae984a3b6a1baded53d3a4b7a20 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Sun, 17 Nov 2024 21:06:49 -0500
Subject: [PATCH 11/53] Edited dev workflow file and dockerfile

---
 .github/workflows/deploy-dev.yml |  7 +++++++
 Dockerfile                       | 17 +++++++++++++++
 2 files changed, 24 insertions(+)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index 3761857..d09d50b 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -27,6 +27,13 @@ jobs:
           file: ./Dockerfile
           push: true
           tags: cornellappdev/uplift-dev:${{ steps.vars.outputs.sha_short }}
+          build-args: |
+            DB_PASSWORD=${{ secrets.DB_PASSWORD }}
+            DB_HOST=postgres
+            DB_NAME=${{ secrets.DB_NAME }}
+            DB_USERNAME=${{ secrets.DB_USERNAME }}
+            DB_PORT=${{ secrets.DB_PORT }}
+            FLASK_ENV=development
       - name: Remote SSH and Deploy
         uses: appleboy/ssh-action@master
         env:
diff --git a/Dockerfile b/Dockerfile
index 4da2df0..f359f37 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,5 +6,22 @@ COPY . .
 ENV MAX_CONCURRENT_PIP=4
 RUN pip3 install --upgrade pip
 RUN pip3 install --exists-action w -r requirements.txt
+
+# to receive build arguments
+ARG DB_PASSWORD
+ARG DB_HOST
+ARG DB_NAME
+ARG DB_USERNAME
+ARG DB_PORT
+ARG FLASK_ENV
+
+# set env variables for build
+ENV DB_PASSWORD=${DB_PASSWORD}
+ENV DB_HOST=${DB_HOST}
+ENV DB_NAME=${DB_NAME}
+ENV DB_USERNAME=${DB_USERNAME}
+ENV DB_PORT=${DB_PORT}
+ENV FLASK_ENV=${FLASK_ENV}
+
+RUN flask db upgrade
 CMD python3 app.py

From 7d38e452f91d164f656bc00f4c89c73b06d19909 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Wed, 20 Nov 2024 17:11:41 -0500
Subject: [PATCH 12/53] Moved migration execution to dev server

---
 .github/workflows/deploy-dev.yml |  9 ++-------
 Dockerfile                       | 18 ------------------
 2 files changed, 2 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index d09d50b..0e42987 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -27,13 +27,6 @@ jobs:
           file: ./Dockerfile
           push: true
           tags: cornellappdev/uplift-dev:${{ steps.vars.outputs.sha_short }}
-          build-args: |
-            DB_PASSWORD=${{ secrets.DB_PASSWORD }}
-            DB_HOST=postgres
-            DB_NAME=${{ secrets.DB_NAME }}
-            DB_USERNAME=${{ secrets.DB_USERNAME }}
-            DB_PORT=${{ secrets.DB_PORT }}
-            FLASK_ENV=development
       - name: Remote SSH and Deploy
         uses: appleboy/ssh-action@master
         env:
@@ -52,4 +45,6 @@ jobs:
             sudo systemctl stop nginx
             sudo systemctl restart nginx
             docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
+            sleep 30s
+            docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade
             yes | docker system prune -a
diff --git a/Dockerfile b/Dockerfile
index f359f37..ec3e3b0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,22 +6,4 @@ COPY . .
 ENV MAX_CONCURRENT_PIP=4
 RUN pip3 install --upgrade pip
 RUN pip3 install --exists-action w -r requirements.txt
-
-# to receive build arguments
-ARG DB_PASSWORD
-ARG DB_HOST
-ARG DB_NAME
-ARG DB_USERNAME
-ARG DB_PORT
-ARG FLASK_ENV
-
-# set env variables for build
-ENV DB_PASSWORD=${DB_PASSWORD}
-ENV DB_HOST=${DB_HOST}
-ENV DB_NAME=${DB_NAME}
-ENV DB_USERNAME=${DB_USERNAME}
-ENV DB_PORT=${DB_PORT}
-ENV FLASK_ENV=${FLASK_ENV}
-
-RUN flask db upgrade
 CMD python3 app.py

From 1a62c987bef7d3099473e65218e57c5e128cbb6c Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Wed, 20 Nov 2024 17:42:25 -0500
Subject: [PATCH 13/53] Added updated migrations file

---
 ...a0f_update_equipment_table_with_muscle_.py |  32 ++++++
 ...70_add_muscle_groups_and_clean_name_to_.py | 105 ------------------
 2 files changed, 32 insertions(+), 105 deletions(-)
 create mode 100644 migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
 delete mode 100644 migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py

diff --git a/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py b/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
new file mode 100644
index 0000000..7e16351
--- /dev/null
+++ b/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
@@ -0,0 +1,32 @@
+"""update equipment table with muscle groups
+
+Revision ID: 24684343da0f
+Revises:
+Create Date: 2024-11-20 17:40:32.344965
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '24684343da0f'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('gear', 'cost',
+                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
+                    nullable=0)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('gear', 'cost',
+                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
+                    nullable=False)
+    # ### end Alembic commands ###

diff --git a/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py b/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py
deleted file mode 100644
index d63c007..0000000
--- a/migrations/versions/79fec8ab4c70_add_muscle_groups_and_clean_name_to_.py
+++ /dev/null
@@ -1,105 +0,0 @@
-"""add muscle groups and clean name to equipment
-
-Revision ID: 79fec8ab4c70
-Revises: f711f3c11324
-Create Date: 2024-11-13 17:48:46.093175
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-from sqlalchemy import Enum
-
-
-# revision identifiers, used by Alembic.
-revision = '79fec8ab4c70'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-# Define enums for reference
-class MuscleGroup(Enum):
-    ABDOMINALS = 1
-    CHEST = 2
-    BACK = 3
-    SHOULDERS = 4
-    BICEPS = 5
-    TRICEPS = 6
-    HAMSTRINGS = 7
-    QUADS = 8
-    GLUTES = 9
-    CALVES = 10
-    MISCELLANEOUS = 11
-    CARDIO = 12
-
-class AccessibilityType(Enum):
-    wheelchair = 0
-
-def upgrade():
-    # First, delete all existing equipment data
-    op.execute('TRUNCATE TABLE equipment CASCADE')
-
-    # Drop the old enum type
-    op.execute('DROP TYPE IF EXISTS equipmenttype')
-
-    # Create new enum types
-    muscle_group_enum = postgresql.ENUM(
-        'ABDOMINALS', 'CHEST', 'BACK', 'SHOULDERS', 'BICEPS', 'TRICEPS',
-        'HAMSTRINGS', 'QUADS', 'GLUTES', 'CALVES', 'MISCELLANEOUS', 'CARDIO',
-        name='musclegroup'
-    )
-    muscle_group_enum.create(op.get_bind())
-
-    new_accessibility_enum = postgresql.ENUM('wheelchair', name='accessibilitytype_new')
-    new_accessibility_enum.create(op.get_bind())
-
-    # Drop old columns
-    op.drop_column('equipment', 'equipment_type')
-    op.drop_column('equipment', 'accessibility')
-
-    # Add new columns
-    op.add_column('equipment', sa.Column('clean_name', sa.String(), nullable=False))
-    op.add_column('equipment',
-        sa.Column('muscle_groups', postgresql.ARRAY(muscle_group_enum), nullable=True)
-    )
-    op.add_column('equipment',
-        sa.Column('accessibility', new_accessibility_enum, nullable=True)
-    )
-
-def downgrade():
-    # First, delete all equipment data
-    op.execute('TRUNCATE TABLE equipment CASCADE')
-
-    # Create old equipment_type enum
-    old_equipment_type_enum = postgresql.ENUM(
-        'cardio',
-        'racks_and_benches',
-        'selectorized',
-        'multi_cable',
-        'free_weights',
-        'miscellaneous',
-        'plate_loaded',
-        name='equipmenttype'
-    )
-    old_equipment_type_enum.create(op.get_bind())
-
-    # Drop new columns
-    op.drop_column('equipment', 'clean_name')
-    op.drop_column('equipment', 'muscle_groups')
-    op.drop_column('equipment', 'accessibility')
-
-    # Add back old columns
-    op.add_column('equipment',
-        sa.Column('equipment_type', old_equipment_type_enum, nullable=False)
-    )
-
-    # Drop new enums
-    postgresql.ENUM(name='musclegroup').drop(op.get_bind())
-    postgresql.ENUM(name='accessibilitytype_new').drop(op.get_bind(), cascade=True)
-
-    # Recreate old accessibility enum and column
-    old_accessibility_enum = postgresql.ENUM('wheelchair', name='accessibilitytype')
-    old_accessibility_enum.create(op.get_bind())
-    op.add_column('equipment',
-        sa.Column('accessibility', old_accessibility_enum, nullable=True)
-    )
From dcd6d056218eaea96e5a9125c3f84adff655e184 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Wed, 20 Nov 2024 17:57:35 -0500
Subject: [PATCH 14/53] Changed github workflow file to try migrations 5 times

---
 .github/workflows/deploy-dev.yml | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index 0e42987..e31dbfa 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -45,6 +45,17 @@ jobs:
             sudo systemctl stop nginx
             sudo systemctl restart nginx
             docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
-            sleep 30s
-            docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade
+            sleep 60s
+            attempt=1
+            max_attempts=5
+            until docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade || [ $attempt -eq $max_attempts ]
+            do
+              echo "Migration attempt $attempt failed. Retrying..."
+              sleep 15
+              attempt=$((attempt + 1))
+            done
+            if [ $attempt -eq $max_attempts ]; then
+              echo "Migration failed after $max_attempts attempts"
+              exit 1
+            fi
             yes | docker system prune -a

From 7f287f3d16ce5ba756afcc34545cf57c4671f70a Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Wed, 20 Nov 2024 18:11:26 -0500
Subject: [PATCH 15/53] Removed alembic dependency to previous migrations

---
 .github/workflows/deploy-dev.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index e31dbfa..5f185ec 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -46,6 +46,8 @@ jobs:
             sudo systemctl restart nginx
             docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
             sleep 60s
+            docker exec $(docker ps -q -f name=the-stack_web) flask db stamp head
+            docker exec $(docker ps -q -f name=the-stack_web) flask db current
             attempt=1
             max_attempts=5
             until docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade || [ $attempt -eq $max_attempts ]

From 9f92efe2fa83a0fa3f9f73d5591076c775bf652a Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Thu, 21 Nov 2024 00:04:26 -0500
Subject: [PATCH 16/53] Reverted scrapers to run without main

---
 .github/workflows/deploy-dev.yml |  2 +-
 app.py                           | 87 ++++++++++++++++----------------
 2 files changed, 44 insertions(+), 45 deletions(-)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index 5f185ec..fc2e6cb 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -46,8 +46,8 @@ jobs:
             sudo systemctl restart nginx
             docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
             sleep 60s
+            docker exec $(docker ps -q -f name=the-stack_web) psql $DB_URL -c "DELETE FROM alembic_version;"
             docker exec $(docker ps -q -f name=the-stack_web) flask db stamp head
-            docker exec $(docker ps -q -f name=the-stack_web) flask db current
             attempt=1
             max_attempts=5
             until docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade || [ $attempt -eq $max_attempts ]
diff --git a/app.py b/app.py
index 056f397..bbecb2a 100644
--- a/app.py
+++ b/app.py
@@ -7,6 +7,15 @@
 from graphql.utils import schema_printer
 from src.database import db_session, init_db
 from src.database import Base as db
+from src.scrapers.capacities_scraper import fetch_capacities
+from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
+from src.scrapers.scraper_helpers import clean_past_hours
+from src.scrapers.sp_hours_scraper import fetch_sp_facility
+from src.scrapers.equipment_scraper import scrape_equipment
+from src.scrapers.class_scraper import fetch_classes
+from src.scrapers.activities_scraper import fetch_activity
+from src.utils.utils import create_gym_table
+from src.models.openhours import OpenHours
 from flask_migrate import Migrate
 from src.schema import Query, Mutation
 from src.database import db_url, db_user, db_password, db_name, db_host, db_port
@@ -61,61 +70,51 @@ def shutdown_session(exception=None):
     db_session.remove()

-# Create schema.graphql
-with open("schema.graphql", "w+") as schema_file:
-    schema_file.write(schema_printer.print_schema(schema))
-    schema_file.close()
+# Scrape hours every 15 minutes

-if __name__ == "__main__":
-    print("Starting app...")
-    from src.scrapers.capacities_scraper import fetch_capacities
-    from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
-    from src.scrapers.scraper_helpers import clean_past_hours
-    from src.scrapers.sp_hours_scraper import fetch_sp_facility
-    from src.scrapers.equipment_scraper import scrape_equipment
-    from src.scrapers.class_scraper import fetch_classes
-    from src.scrapers.activities_scraper import fetch_activity
-    from src.utils.utils import create_gym_table
-    from src.models.openhours import OpenHours
-    # Scrape hours every 15 minutes

+@scheduler.task("interval", id="scrape_hours", seconds=900)
+def scrape_hours():
+    logging.info("Scraping hours from sheets...")

-    @scheduler.task("interval", id="scrape_hours", seconds=900)
-    def scrape_hours():
-        logging.info("Scraping hours from sheets...")
+    # Clear hours
+    db_session.query(OpenHours).delete()

-        # Clear hours
-        db_session.query(OpenHours).delete()
+    fetch_reg_facility()
+    fetch_reg_building()
+    fetch_sp_facility()
+    clean_past_hours()

-        fetch_reg_facility()
-        fetch_reg_building()
-        fetch_sp_facility()
-        clean_past_hours()
+# Scrape capacities every 10 minutes

-    # Scrape capacities every 10 minutes

+@scheduler.task("interval", id="scrape_capacities", seconds=600)
+def scrape_capacities():
+    logging.info("Scraping capacities from C2C...")

-    @scheduler.task("interval", id="scrape_capacities", seconds=600)
-    def scrape_capacities():
-        logging.info("Scraping capacities from C2C...")
+    fetch_capacities()

-        fetch_capacities()
+# Scrape classes every hour

-    # Scrape classes every hour

+@scheduler.task("interval", id="scrape_classes", seconds=3600)
+def scrape_classes():
+    logging.info("Scraping classes from group-fitness-classes...")

-    @scheduler.task("interval", id="scrape_classes", seconds=3600)
-    def scrape_classes():
-        logging.info("Scraping classes from group-fitness-classes...")
+    fetch_classes(10)

-        fetch_classes(10)
+# Create database and fill it with data
+init_db()
+create_gym_table()

-    # Create database and fill it with data
-    init_db()
-    create_gym_table()
+scrape_classes()
+scrape_hours()
+scrape_capacities()
+scrape_equipment()
+logging.info("Scraping activities from sheets...")
+fetch_activity()

-    scrape_classes()
-    scrape_hours()
-    scrape_capacities()
-    scrape_equipment()
-    logging.info("Scraping activities from sheets...")
-    fetch_activity()
+# Create schema.graphql
+with open("schema.graphql", "w+") as schema_file:
+    schema_file.write(schema_printer.print_schema(schema))
+    schema_file.close()

+if __name__ == "__main__":
     app.run(host="127.0.0.1", port=5000)

From 43049b0e8b4736f04e450fba76a74940387d8b3c Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Sun, 24 Nov 2024 15:23:18 -0500
Subject: [PATCH 17/53] Created migration mode in app

---
 .github/workflows/deploy-dev.yml |   6 +-
 Dockerfile                       |   1 +
 app.py                           | 151
++++++++--
 ...a0f_update_equipment_table_with_muscle_.py | 110 +++++++++++--
 4 files changed, 192 insertions(+), 76 deletions(-)

diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index fc2e6cb..a5fb051 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -47,10 +47,10 @@ jobs:
             docker stack deploy -c docker-compose.yml the-stack --with-registry-auth
             sleep 60s
             docker exec $(docker ps -q -f name=the-stack_web) psql $DB_URL -c "DELETE FROM alembic_version;"
-            docker exec $(docker ps -q -f name=the-stack_web) flask db stamp head
+            docker exec $(docker ps -q -f name=the-stack_web) /bin/sh -c "FLASK_MIGRATE=true flask db stamp head"
             attempt=1
-            max_attempts=5
-            until docker exec $(docker ps -q -f name=the-stack_web) flask db upgrade || [ $attempt -eq $max_attempts ]
+            max_attempts=3
+            until docker exec $(docker ps -q -f name=the-stack_web) /bin/sh -c "FLASK_MIGRATE=true flask db upgrade" || [ $attempt -eq $max_attempts ]
             do
               echo "Migration attempt $attempt failed. Retrying..."
               sleep 15
diff --git a/Dockerfile b/Dockerfile
index ec3e3b0..32b14d2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,6 @@
 FROM python:3.9
 ENV TZ="America/New_York"
+ENV FLASK_MIGRATE=false
 RUN mkdir -p /usr/src/app
 WORKDIR /usr/src/app
 COPY . .
diff --git a/app.py b/app.py
index bbecb2a..96f6343 100644
--- a/app.py
+++ b/app.py
@@ -1,34 +1,40 @@
 import logging
+import os
 import sentry_sdk
 from flask import Flask, render_template
-from flask_apscheduler import APScheduler
-from flask_graphql import GraphQLView
 from graphene import Schema
 from graphql.utils import schema_printer
 from src.database import db_session, init_db
 from src.database import Base as db
-from src.scrapers.capacities_scraper import fetch_capacities
-from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
-from src.scrapers.scraper_helpers import clean_past_hours
-from src.scrapers.sp_hours_scraper import fetch_sp_facility
-from src.scrapers.equipment_scraper import scrape_equipment
-from src.scrapers.class_scraper import fetch_classes
-from src.scrapers.activities_scraper import fetch_activity
-from src.utils.utils import create_gym_table
+from src.database import db_url, db_user, db_password, db_name, db_host, db_port
 from src.models.openhours import OpenHours
 from flask_migrate import Migrate
 from src.schema import Query, Mutation
-from src.database import db_url, db_user, db_password, db_name, db_host, db_port
 from flasgger import Swagger
+from flask_graphql import GraphQLView
+
+# Check if we're in migration mode with error handling
+try:
+    FLASK_MIGRATE = os.getenv('FLASK_MIGRATE', 'false').lower() == 'true'
+except Exception as e:
+    logging.warning(f"Error reading FLASK_MIGRATE environment variable: {e}. Defaulting to false.")
Defaulting to false.") + FLASK_MIGRATE = False + +# Only import scraping-related modules if not in migration mode +if not FLASK_MIGRATE: + from flask_apscheduler import APScheduler + from src.scrapers.capacities_scraper import fetch_capacities + from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility + from src.scrapers.scraper_helpers import clean_past_hours + from src.scrapers.sp_hours_scraper import fetch_sp_facility + from src.scrapers.equipment_scraper import scrape_equipment + from src.scrapers.class_scraper import fetch_classes + from src.scrapers.activities_scraper import fetch_activity + from src.utils.utils import create_gym_table sentry_sdk.init( dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560", - # Set traces_sample_rate to 1.0 to capture 100% - # of transactions for tracing. traces_sample_rate=1.0, - # Set profiles_sample_rate to 1.0 to profile 100% - # of sampled transactions. - # We recommend adjusting this value in production. profiles_sample_rate=1.0, ) @@ -48,68 +54,87 @@ schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) -# Scheduler -scheduler = APScheduler() -scheduler.init_app(app) -scheduler.start() +def should_run_initial_scrape(): + """ + Check if we should run initial scraping: + - Not in migration mode + - Running in the main process (not Flask reloader) + Added because flask will automatically run the app twice in debug mode, + causing us to call the api twice in succession causing a timeout from the google api. + """ + is_main_process = os.environ.get('WERKZEUG_RUN_MAIN') == 'true' + return not FLASK_MIGRATE and is_main_process + +# Initialize scheduler only if not in migration mode +if not FLASK_MIGRATE: + scheduler = APScheduler() + if should_run_initial_scrape(): + scheduler.init_app(app) + scheduler.start() # Logging logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S") - @app.route("/") def index(): return render_template("index.html") - app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True)) - @app.teardown_appcontext def shutdown_session(exception=None): db_session.remove() - -# Scrape hours every 15 minutes - -@scheduler.task("interval", id="scrape_hours", seconds=900) -def scrape_hours(): - logging.info("Scraping hours from sheets...") - - # Clear hours - db_session.query(OpenHours).delete() - - fetch_reg_facility() - fetch_reg_building() - fetch_sp_facility() - clean_past_hours() - -# Scrape capacities every 10 minutes - -@scheduler.task("interval", id="scrape_capacities", seconds=600) -def scrape_capacities(): - logging.info("Scraping capacities from C2C...") - - fetch_capacities() - -# Scrape classes every hour - -@scheduler.task("interval", id="scrape_classes", seconds=3600) -def scrape_classes(): - logging.info("Scraping classes from group-fitness-classes...") - - fetch_classes(10) - -# Create database and fill it with data +# Only define scheduler tasks if not in migration mode +if not FLASK_MIGRATE: + # Scrape hours every 15 minutes + @scheduler.task("interval", id="scrape_hours", seconds=900) + def scrape_hours(): + try: + logging.info("Scraping hours from sheets...") + # Clear hours + db_session.query(OpenHours).delete() + fetch_reg_facility() + fetch_reg_building() + fetch_sp_facility() + clean_past_hours() + except Exception as e: + logging.error(f"Error in scrape_hours: {e}") + + # Scrape capacities every 10 minutes + 
@scheduler.task("interval", id="scrape_capacities", seconds=600) + def scrape_capacities(): + try: + logging.info("Scraping capacities from C2C...") + fetch_capacities() + except Exception as e: + logging.error(f"Error in scrape_capacities: {e}") + + # Scrape classes every hour + @scheduler.task("interval", id="scrape_classes", seconds=3600) + def scrape_classes(): + try: + logging.info("Scraping classes from group-fitness-classes...") + fetch_classes(10) + except Exception as e: + logging.error(f"Error in scrape_classes: {e}") + +# Create database init_db() -create_gym_table() - -scrape_classes() -scrape_hours() -scrape_capacities() -scrape_equipment() -logging.info("Scraping activities from sheets...") -fetch_activity() + +# Run initial scraping only in main process and not in migration mode +if should_run_initial_scrape(): + logging.info("Running initial scraping...") + try: + create_gym_table() + scrape_classes() + scrape_hours() + scrape_capacities() + scrape_equipment() + logging.info("Scraping activities from sheets...") + fetch_activity() + except Exception as e: + logging.error(f"Error during initial scraping: {e}") # Create schema.graphql with open("schema.graphql", "w+") as schema_file: diff --git a/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py b/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py index 7e16351..bf9d7f2 100644 --- a/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py +++ b/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py @@ -8,6 +8,8 @@ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql +from sqlalchemy import Enum +from enum import Enum as PyEnum # revision identifiers, used by Alembic. revision = '24684343da0f' @@ -15,18 +17,106 @@ branch_labels = None depends_on = None +class MuscleGroup(PyEnum): + ABDOMINALS = 1 + CHEST = 2 + BACK = 3 + SHOULDERS = 4 + BICEPS = 5 + TRICEPS = 6 + HAMSTRINGS = 7 + QUADS = 8 + GLUTES = 9 + CALVES = 10 + MISCELLANEOUS = 11 + CARDIO = 12 def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=0)
-    # ### end Alembic commands ###
+    # Create new muscle_group enum type
+    muscle_group_enum = postgresql.ENUM(
+        'ABDOMINALS', 'CHEST', 'BACK', 'SHOULDERS', 'BICEPS', 'TRICEPS',
+        'HAMSTRINGS', 'QUADS', 'GLUTES', 'CALVES', 'MISCELLANEOUS', 'CARDIO',
+        name='musclegroup'
+    )
+    muscle_group_enum.create(op.get_bind())

+    # Add new columns first
+    op.add_column('equipment', sa.Column('clean_name', sa.String(), nullable=True))
+    op.add_column('equipment',
+        sa.Column('muscle_groups', postgresql.ARRAY(muscle_group_enum), nullable=True)
+    )
+
+    # Update data: Set clean_name equal to name initially
+    op.execute('UPDATE equipment SET clean_name = name')
+
+    # Convert equipment_type to muscle_groups based on mapping
+    op.execute("""
+        UPDATE equipment SET muscle_groups = CASE
+            WHEN equipment_type = 'cardio' THEN ARRAY['CARDIO']::musclegroup[]
+            WHEN equipment_type = 'racks_and_benches' THEN ARRAY['CHEST', 'BACK', 'SHOULDERS']::musclegroup[]
+            WHEN equipment_type = 'selectorized' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
+            WHEN equipment_type = 'multi_cable' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
+            WHEN equipment_type = 'free_weights' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
+            WHEN equipment_type = 'plate_loaded' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
+            ELSE ARRAY['MISCELLANEOUS']::musclegroup[]
+        END
+    """)
+
+    # Make clean_name not nullable after updating data
+    op.alter_column('equipment', 'clean_name',
+                    existing_type=sa.String(),
+                    nullable=False)
+
+    # Make muscle_groups not nullable after data migration
+    op.alter_column('equipment', 'muscle_groups',
+                    existing_type=postgresql.ARRAY(muscle_group_enum),
+                    nullable=False)
+
+    # Drop the old equipment_type column and enum
+    op.drop_column('equipment', 'equipment_type')
+    op.execute('DROP TYPE equipmenttype')

 def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-    # ### end Alembic commands ###
+    # Create old equipment_type enum
+    op.execute("""
+        CREATE TYPE equipmenttype AS ENUM (
+            'cardio',
+            'racks_and_benches',
+            'selectorized',
+            'multi_cable',
+            'free_weights',
+            'miscellaneous',
+            'plate_loaded'
+        )
+    """)
+
+    # Add equipment_type column
+    op.add_column('equipment',
+        sa.Column('equipment_type', postgresql.ENUM('cardio', 'racks_and_benches', 'selectorized',
+                                                    'multi_cable', 'free_weights', 'miscellaneous',
+                                                    'plate_loaded', name='equipmenttype'),
+                  nullable=True))
+
+    # Convert muscle_groups back to equipment_type
+    op.execute("""
+        UPDATE equipment SET equipment_type = CASE
+            WHEN 'CARDIO' = ANY(muscle_groups) THEN 'cardio'::equipmenttype
+            WHEN 'CHEST' = ANY(muscle_groups) OR 'BACK' = ANY(muscle_groups) OR 'SHOULDERS' = ANY(muscle_groups)
+                THEN 'racks_and_benches'::equipmenttype
+            ELSE 'miscellaneous'::equipmenttype
+        END
+    """)
+
+    # Make equipment_type not nullable
+    op.alter_column('equipment', 'equipment_type',
+                    existing_type=postgresql.ENUM('cardio', 'racks_and_benches', 'selectorized',
+                                                  'multi_cable', 'free_weights', 'miscellaneous',
+                                                  'plate_loaded', name='equipmenttype'),
+                    nullable=False)
+
+    # Drop new columns
+    op.drop_column('equipment', 'muscle_groups')
+    op.drop_column('equipment', 'clean_name')
+
+    # Drop muscle_group enum
+    op.execute('DROP TYPE musclegroup')

From bdb4acdfe5b9e10e05da13603960a2590a794499 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 26 Nov 2024 14:06:39 -0500
Subject: [PATCH 18/53] Added main process condition

---
 app.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/app.py b/app.py
index 96f6343..d8447e1 100644
--- a/app.py
+++ b/app.py
@@ -58,17 +58,22 @@ def should_run_initial_scrape():
     """
     Check if we should run initial scraping:
     - Not in migration mode
-    - Running in the main process (not Flask reloader)
-    Added because flask will automatically run the app twice in debug mode,
-    causing us to call the api twice in succession causing a timeout from the google api.
+    - Either in production (no WERKZEUG_RUN_MAIN) or in the main Werkzeug process
     """
-    is_main_process = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
+    is_development = app.debug
+    if is_development:
+        # In development, only run in main Werkzeug process
+        is_main_process = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
+    else:
+        # In production (Gunicorn), always consider it main process
+        is_main_process = True
+
     return not FLASK_MIGRATE and is_main_process

 # Initialize scheduler only if not in migration mode
 if not FLASK_MIGRATE:
     scheduler = APScheduler()
-    if should_run_initial_scrape():
+    if should_run_initial_scrape():  # Only start scheduler in main process
         scheduler.init_app(app)
         scheduler.start()
@@ -87,7 +92,6 @@ def shutdown_session(exception=None):

 # Only define scheduler tasks if not in migration mode
 if not FLASK_MIGRATE:
-    # Scrape hours every 15 minutes
     @scheduler.task("interval", id="scrape_hours", seconds=900)
     def scrape_hours():
         try:
@@ -101,7 +105,6 @@ def scrape_hours():
         except Exception as e:
             logging.error(f"Error in scrape_hours: {e}")

-    # Scrape capacities every 10 minutes
     @scheduler.task("interval", id="scrape_capacities", seconds=600)
     def scrape_capacities():
         try:
@@ -110,7 +113,6 @@ def scrape_capacities():
         except Exception as e:
             logging.error(f"Error in scrape_capacities: {e}")

-    # Scrape classes every hour
     @scheduler.task("interval", id="scrape_classes", seconds=3600)
     def scrape_classes():
         try:

From b01ab73166a37f2e2995c1bb5cc39f4cb767d18c Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 26 Nov 2024 14:13:51 -0500
Subject: [PATCH 19/53] Simplified running condition

---
 app.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/app.py b/app.py
index d8447e1..c80ef32 100644
--- a/app.py
+++ b/app.py
@@ -60,15 +60,7 @@ def should_run_initial_scrape():
     - Not in migration mode
     - Either in production (no WERKZEUG_RUN_MAIN) or in the main Werkzeug process
     """
-    is_development = app.debug
-    if is_development:
-        # In development, only run in main Werkzeug process
-        is_main_process = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
-    else:
-        # In production (Gunicorn), always consider it main process
-        is_main_process = True
-
-    return not FLASK_MIGRATE and is_main_process
+    return not FLASK_MIGRATE and os.environ.get('WERKZEUG_RUN_MAIN') == 'true'

 # Initialize scheduler only if not in migration mode
 if not FLASK_MIGRATE:

From 6c38e348df138d1bf9e0a8f89481d1df009a06d5 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 26 Nov 2024 14:15:25 -0500
Subject: [PATCH 20/53] Readded scraping comments

---
 app.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/app.py b/app.py
index c80ef32..38c65e6 100644
--- a/app.py
+++ b/app.py
@@ -84,6 +84,7 @@ def shutdown_session(exception=None):

 # Only define scheduler tasks if not in migration mode
 if not FLASK_MIGRATE:
+    # Scrape capacities every 15 minutes
     @scheduler.task("interval", id="scrape_hours", seconds=900)
     def scrape_hours():
         try:
@@ -97,6 +98,7 @@ def scrape_hours():
         except Exception as e:
             logging.error(f"Error in scrape_hours: {e}")

+    # Scrape capacities every 10 minutes
     @scheduler.task("interval", id="scrape_capacities", seconds=600)
     def scrape_capacities():
         try:
@@ -105,6 +107,7 @@ def scrape_capacities():
         except Exception as e:
             logging.error(f"Error in scrape_capacities: {e}")

+    # Scrape capacities every hour
     @scheduler.task("interval", id="scrape_classes", seconds=3600)
     def scrape_classes():
         try:

From 11ee72a847fa3477b12b36a4e104971d04f3574e Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 26 Nov 2024 14:16:04 -0500
Subject: [PATCH 21/53] Fixed scraping comments

---
 app.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app.py b/app.py
index 38c65e6..aeecf17 100644
--- a/app.py
+++ b/app.py
@@ -84,7 +84,7 @@ def shutdown_session(exception=None):

 # Only define scheduler tasks if not in migration mode
 if not FLASK_MIGRATE:
-    # Scrape capacities every 15 minutes
+    # Scrape hours every 15 minutes
     @scheduler.task("interval", id="scrape_hours", seconds=900)
     def scrape_hours():
         try:
@@ -107,7 +107,7 @@ def scrape_capacities():
         except Exception as e:
             logging.error(f"Error in scrape_capacities: {e}")

-    # Scrape capacities every hour
+    # Scrape classes every hour
     @scheduler.task("interval", id="scrape_classes", seconds=3600)
     def scrape_classes():
         try:

From add86cb2223782b560adf3b3179877dc8056b36e Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 26 Nov 2024 14:25:26 -0500
Subject: [PATCH 22/53] Modified run condition again

---
 app.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/app.py b/app.py
index aeecf17..da6790f 100644
--- a/app.py
+++ b/app.py
@@ -58,9 +58,16 @@ def should_run_initial_scrape():
     """
     Check if we should run initial scraping:
     - Not in migration mode
-    - Either in production (no WERKZEUG_RUN_MAIN) or in the main Werkzeug process
+    - Only in the main process (Werkzeug or Gunicorn)
     """
-    return not FLASK_MIGRATE and os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
+    # If in migration mode, don't run initial scraping
+    if FLASK_MIGRATE:
+        return False
+    # Check if we're in the main process
+    werkzeug_var = os.environ.get('WERKZEUG_RUN_MAIN')
+    # Logic: if in local, then werkzeug_var exists: so only run when true to prevent double running
+    # If in Gunicorn, then werkzeug_var is None, so then it will also run
+    return werkzeug_var is None or werkzeug_var == 'true'

From d5da9f4a73aeeac0b2c4ab601758a9f3b43a77f4 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Fri, 24 Jan 2025 22:17:19 -0500
Subject: [PATCH 23/53] Fixed capacity scraper to scrape from new URL

---
 src/models/checkin.py              | 24 +++++++++++++++++
 src/scrapers/capacities_scraper.py | 43 ++++++++++++++++++++++++++++--
 src/utils/constants.py             |  4 ++-
 3 files changed, 68 insertions(+), 3 deletions(-)
 create mode 100644 src/models/checkin.py

diff --git a/src/models/checkin.py b/src/models/checkin.py
new file mode 100644
index 0000000..93c932a
--- /dev/null
+++ b/src/models/checkin.py
@@ -0,0 +1,24 @@
+from sqlalchemy import Column, Integer, ForeignKey, DateTime
+from sqlalchemy.orm import relationship
+from src.database import Base
+
+class Checkin(Base):
+    """
+    A checkin object
+
+    Attributes:
+    - `id` The ID of the checkin.
+    - `created_at` The date and time the user checked in.
+    - `user_id` The ID of the user who checked in.
+    - `gym_id` The ID of the gym the user checked into.
+ """ + + __tablename__ = "checkin" + + id = Column(Integer, primary_key=True) + created_at = Column(DateTime, nullable=False) + gym_id = Column(Integer, ForeignKey("gym.id"), nullable=False) + user_id = Column(Integer, ForeignKey("users.id"), nullable=False) + # Make relationship with gym and user + gym = relationship("Gym", back_populates="checkins") + user = relationship("User", back_populates="checkins") diff --git a/src/scrapers/capacities_scraper.py b/src/scrapers/capacities_scraper.py index bc269ff..d5ca109 100644 --- a/src/scrapers/capacities_scraper.py +++ b/src/scrapers/capacities_scraper.py @@ -6,6 +6,7 @@ from src.models.capacity import Capacity from src.utils.constants import ( C2C_URL, + CRC_URL_NEW, CAPACITY_MARKER_COUNTS, CAPACITY_MARKER_NAMES, CAPACITY_MARKER_UPDATED, @@ -14,8 +15,8 @@ ) from src.utils.utils import get_facility_id, unix_time - -def fetch_capacities(): +# Legacy scraper from old webpage using CRC_URL +def fetch_capacities_old(): """ Fetch capacities for all facilities from Connect2Concepts. """ @@ -49,6 +50,44 @@ def fetch_capacities(): # Add to sheets add_single_capacity(count, facility_id, percent, updated) +# New scraper from new API using CRC_URL_NEW +def fetch_capacities(): + """Fetch capacities from the new JSON API endpoint.""" + try: + headers = { + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0" + } + + response = requests.get(CRC_URL_NEW, headers=headers) + facilities = response.json() + + for facility in facilities: + try: + facility_name = facility["LocationName"] + + # Map API name to database name + if facility_name not in CAPACITY_MARKER_NAMES: + print(f"Warning: No name mapping for facility: {facility_name}") + continue + + db_name = CAPACITY_MARKER_NAMES[facility_name] + facility_id = get_facility_id(db_name) + + count = int(facility["LastCount"]) + updated_str = facility["LastUpdatedDateAndTime"] + total_capacity = int(facility["TotalCapacity"]) + + percent = count / total_capacity if total_capacity > 0 else 0.0 + updated = datetime.strptime(updated_str.split(".")[0], "%Y-%m-%dT%H:%M:%S") + + add_single_capacity(count, facility_id, percent, updated) + + except Exception as e: + print(f"Error processing facility {facility.get('LocationName', 'unknown')}: {str(e)}") + + except Exception as e: + print(f"Error fetching capacities: {str(e)}") + raise def add_single_capacity(count, facility_id, percent, updated): """ diff --git a/src/utils/constants.py b/src/utils/constants.py index 58dc13d..50a2c08 100644 --- a/src/utils/constants.py +++ b/src/utils/constants.py @@ -6,8 +6,10 @@ # Base URL for Cornell Recreation Website BASE_URL = "https://scl.cornell.edu/recreation/" -# The path for capacities +# The old path for capacities C2C_URL = "https://connect2concepts.com/connect2/?type=bar&key=355de24d-d0e4-4262-ae97-bc0c78b92839&loc_status=false" +# The new path for capacities +CRC_URL_NEW = "https://goboardapi.azurewebsites.net/api/FacilityCount/GetCountsByAccount?AccountAPIKey=355de24d-d0e4-4262-ae97-bc0c78b92839" # The marker for counts in the HTML CAPACITY_MARKER_COUNTS = "Last Count: " From 5744933d42bb1fe272aeca2a5af2595f7a1f087a Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Fri, 24 Jan 2025 22:21:37 -0500 Subject: [PATCH 24/53] Removed new checkin model code, will push later --- src/models/checkin.py | 24 ------------------------ 1 file changed, 24 deletions(-) delete mode 100644 src/models/checkin.py diff --git a/src/models/checkin.py b/src/models/checkin.py deleted file mode 100644 index 
93c932a..0000000
--- a/src/models/checkin.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from sqlalchemy import Column, Integer, ForeignKey, DateTime
-from sqlalchemy.orm import relationship
-from src.database import Base
-
-class Checkin(Base):
-    """
-    A checkin object
-
-    Attributes:
-    - `id` The ID of the checkin.
-    - `created_at` The date and time the user checked in.
-    - `user_id` The ID of the user who checked in.
-    - `gym_id` The ID of the gym the user checked into.
-    """
-
-    __tablename__ = "checkin"
-
-    id = Column(Integer, primary_key=True)
-    created_at = Column(DateTime, nullable=False)
-    gym_id = Column(Integer, ForeignKey("gym.id"), nullable=False)
-    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
-    # Make relationship with gym and user
-    gym = relationship("Gym", back_populates="checkins")
-    user = relationship("User", back_populates="checkins")

From d59b97c0d4d4a00f9f44c3e50381ad23ad9b1a1f Mon Sep 17 00:00:00 2001
From: Sophie Strausberg
Date: Tue, 4 Feb 2025 20:17:52 -0500
Subject: [PATCH 25/53] initial commit

---
 app.py                                | 13 ++++++++-
 schema.graphql                        | 15 ++++++++--
 src/models/enums.py                   | 22 ++++++++++++++
 src/models/hourly_average_capacity.py | 41 +++++++++++++++++++++++++++
 src/models/user.py                    | 18 ++----------
 src/schema.py                         | 31 ++++++++++++++++++--
 src/scrapers/capacities_scraper.py    | 33 +++++++++++++++++++++
 7 files changed, 153 insertions(+), 20 deletions(-)
 create mode 100644 src/models/enums.py
 create mode 100644 src/models/hourly_average_capacity.py

diff --git a/app.py b/app.py
index da6790f..22598bd 100644
--- a/app.py
+++ b/app.py
@@ -1,6 +1,7 @@
 import logging
 import os
 import sentry_sdk
+from datetime import datetime
 from flask import Flask, render_template
 from graphene import Schema
 from graphql.utils import schema_printer
@@ -23,7 +24,7 @@
 # Only import scraping-related modules if not in migration mode
 if not FLASK_MIGRATE:
     from flask_apscheduler import APScheduler
-    from src.scrapers.capacities_scraper import fetch_capacities
+    from src.scrapers.capacities_scraper import fetch_capacities, update_hourly_capacity
     from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
     from src.scrapers.scraper_helpers import clean_past_hours
     from src.scrapers.sp_hours_scraper import fetch_sp_facility
@@ -123,6 +124,16 @@ def scrape_classes():
         except Exception as e:
             logging.error(f"Error in scrape_classes: {e}")

+    # Update hourly average capacity every hour
+    @scheduler.task("cron", id="update_capacity", minute="*")
+    def scheduled_job():
+        current_time = datetime.now()
+        current_day = current_time.strftime("%A").upper()
+        current_hour = current_time.hour
+
+        logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...")
+        update_hourly_capacity(current_day, current_hour)
+
 # Create database
 init_db()

diff --git a/schema.graphql b/schema.graphql
index d580ec9..b97c2ad 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -71,7 +71,7 @@ type CreateReport {

 scalar DateTime

-enum DayOfWeekEnum {
+enum DayOfWeekGraphQLEnum {
   MONDAY
   TUESDAY
   WEDNESDAY
@@ -137,6 +137,15 @@ type Gym {
   reports: [Report]
 }

+type HourlyAverageCapacity {
+  id: ID!
+  facilityId: Int!
+  averagePercent: Float!
+  hourOfDay: Int!
+  dayOfWeek: DayOfWeekGraphQLEnum
+  history: [Float]!
+}
+
 enum MuscleGroup {
   ABDOMINALS
   CHEST
   BACK
   SHOULDERS
   BICEPS
   TRICEPS
   HAMSTRINGS
   QUADS
   GLUTES
   CALVES
   MISCELLANEOUS
   CARDIO
@@ -189,11 +198,13 @@ enum PriceType {
 type Query {
   getAllGyms: [Gym]
+  getUserByNetId(netId: String): [User]
   getUsersByGiveawayId(id: Int): [User]
   getWeeklyWorkoutDays(id: Int): [String]
   getWorkoutsById(id: Int): [Workout]
   activities: [Activity]
   getAllReports: [Report]
+  getHourlyAverageCapacitiesByFacilityId(facilityId: Int): [HourlyAverageCapacity]
 }

 type Report {
@@ -220,7 +231,7 @@ type User {
   email: String
   netId: String!
   name: String!
-  workoutGoal: [DayOfWeekEnum]
+  workoutGoal: [DayOfWeekGraphQLEnum]
   giveaways: [Giveaway]
   reports: [Report]
 }
diff --git a/src/models/enums.py b/src/models/enums.py
new file mode 100644
index 0000000..b88465b
--- /dev/null
+++ b/src/models/enums.py
@@ -0,0 +1,22 @@
+import enum
+from graphene import Enum as GrapheneEnum
+
+# SQLAlchemy Enum
+class DayOfWeekEnum(enum.Enum):
+    MONDAY = "MONDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
+    THURSDAY = "THURSDAY"
+    FRIDAY = "FRIDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+
+# GraphQL Enum
+class DayOfWeekGraphQLEnum(GrapheneEnum):
+    MONDAY = "MONDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
+    THURSDAY = "THURSDAY"
+    FRIDAY = "FRIDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
\ No newline at end of file
diff --git a/src/models/hourly_average_capacity.py b/src/models/hourly_average_capacity.py
new file mode 100644
index 0000000..a4f90c8
--- /dev/null
+++ b/src/models/hourly_average_capacity.py
@@ -0,0 +1,41 @@
+from sqlalchemy import Column, Integer, Float, ForeignKey, ARRAY, Enum
+from src.models.enums import DayOfWeekEnum
+from src.database import Base
+from sqlalchemy.types import Numeric
+from decimal import Decimal
+
+
+class HourlyAverageCapacity(Base):
+    """
+    Stores the average hourly capacity of a facility over the past 30 days.
+
+    Attributes:
+    - `id` The ID of the hourly capacity record.
+    - `facility_id` The ID of the facility this capacity record belongs to.
+    - `average_percent` Average percent capacity of the facility, represented as a float between 0.0 and 1.0
+    - `hour_of_day` The hour of the day this average is recorded for, in 24-hour format.
+    - `day_of_week` The day of the week this average is recorded for
+    - `history` Stores previous capacity data for this hour from (up to) the past 30 days.
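+
+    As a worked example of the rolling average below: with history [0.20, 0.40]
+    and a new reading of 0.60, history becomes [0.20, 0.40, 0.60] and
+    average_percent is recomputed as (0.20 + 0.40 + 0.60) / 3 = 0.40.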
+ """ + + __tablename__ = "hourly_average_capacity" + + id = Column(Integer, primary_key=True) + facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False) + average_percent = Column(Float, nullable=False) + hour_of_day = Column(Integer, nullable=False) + day_of_week = Column(Enum(DayOfWeekEnum)) + history = Column(ARRAY(Numeric), nullable=False, default=[]) + + def update_hourly_average(self, current_percent): + new_capacity = Decimal(current_percent).quantize(Decimal('0.01')) + + if len(self.history) >= 30: + self.history = self.history[-29:] # Keep 29 newest records + + self.history = self.history + [new_capacity] if self.history else [new_capacity] + + total = 0 + for capacity in self.history: + total += capacity + self.average_percent = total / len(self.history) \ No newline at end of file diff --git a/src/models/user.py b/src/models/user.py index a442f30..3d7939c 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -1,19 +1,7 @@ -from sqlalchemy import Column, Integer, String, ARRAY -from sqlalchemy import Enum as SQLAEnum +from sqlalchemy import Column, Integer, String, ARRAY, Enum from sqlalchemy.orm import backref, relationship from src.database import Base -from enum import Enum - - -class DayOfWeekEnum(Enum): - MONDAY = "Monday" - TUESDAY = "Tuesday" - WEDNESDAY = "Wednesday" - THURSDAY = "Thursday" - FRIDAY = "Friday" - SATURDAY = "Saturday" - SUNDAY = "Sunday" - +from src.models.enums import DayOfWeekEnum class User(Base): """ @@ -37,4 +25,4 @@ class User(Base): reports = relationship("Report", back_populates="user") net_id = Column(String, nullable=False) name = Column(String, nullable=False) - workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) + workout_goal = Column(ARRAY(Enum(DayOfWeekEnum)), nullable=True) diff --git a/src/schema.py b/src/schema.py index c5e860d..3e973fb 100644 --- a/src/schema.py +++ b/src/schema.py @@ -12,11 +12,12 @@ from src.models.classes import Class as ClassModel from src.models.classes import ClassInstance as ClassInstanceModel from src.models.user import User as UserModel -from src.models.user import DayOfWeekEnum +from src.models.enums import DayOfWeekGraphQLEnum from src.models.giveaway import Giveaway as GiveawayModel from src.models.giveaway import GiveawayInstance as GiveawayInstanceModel from src.models.workout import Workout as WorkoutModel from src.models.report import Report as ReportModel +from src.models.hourly_average_capacity import HourlyAverageCapacity as HourlyAverageCapacityModel from src.database import db_session @@ -115,6 +116,14 @@ class Meta: model = CapacityModel +#MARK - Hourly Average Capacity +class HourlyAverageCapacity(SQLAlchemyObjectType): + class Meta: + model = HourlyAverageCapacityModel + + day_of_week = graphene.Field(DayOfWeekGraphQLEnum) + + # MARK: - Price @@ -176,6 +185,7 @@ def resolve_pricing(self, info): class User(SQLAlchemyObjectType): class Meta: model = UserModel + workout_goal = graphene.List(DayOfWeekGraphQLEnum) class UserInput(graphene.InputObjectType): @@ -228,6 +238,7 @@ def resolve_user(self, info): class Query(graphene.ObjectType): get_all_gyms = graphene.List(Gym, description="Get all gyms.") + get_user_by_net_id = graphene.List(User, net_id=graphene.String(), description="Get user by Net ID.") get_users_by_giveaway_id = graphene.List(User, id=graphene.Int(), description="Get all users given a giveaway ID.") get_weekly_workout_days = graphene.List( graphene.String, id=graphene.Int(), description="Get the days a user worked out for the current week." 
@@ -235,6 +246,9 @@ class Query(graphene.ObjectType):
     get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.")
     activities = graphene.List(Activity)
     get_all_reports = graphene.List(Report, description="Get all reports.")
+    get_hourly_average_capacities_by_facility_id = graphene.List(
+        HourlyAverageCapacity, facility_id=graphene.Int(), description="Get all facility hourly average capacities."
+    )

     def resolve_get_all_gyms(self, info):
         query = Gym.get_query(info)
@@ -243,6 +257,12 @@ def resolve_get_all_gyms(self, info):
     def resolve_activities(self, info):
         query = Activity.get_query(info)
         return query.all()
+
+    def resolve_get_user_by_net_id(self, info, net_id):
+        user = User.get_query(info).filter(UserModel.net_id == net_id).all()
+        if not user:
+            raise GraphQLError("User with the given Net ID does not exist.")
+        return user

     def resolve_get_users_by_giveaway_id(self, info, id):
         entries = GiveawayInstance.get_query(info).filter(GiveawayInstanceModel.giveaway_id == id).all()
@@ -281,6 +301,13 @@ def resolve_get_weekly_workout_days(self, info, id):
     def resolve_get_all_reports(self, info):
         query = ReportModel.query.all()
         return query
+
+    def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id):
+        valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681]
+        if facility_id not in valid_facility_ids:
+            raise GraphQLError("Invalid facility ID.")
+        query = HourlyAverageCapacity.get_query(info).filter(HourlyAverageCapacityModel.facility_id == facility_id)
+        return query.all()

 # MARK: - Mutation
@@ -377,7 +404,7 @@ def mutate(self, info, user_id, workout_goal):
         for day in workout_goal:
             try:
                 # Convert string to enum
-                validated_workout_goal.append(DayOfWeekEnum[day.upper()])
+                validated_workout_goal.append(DayOfWeekGraphQLEnum[day.upper()].value)
             except KeyError:
                 raise GraphQLError(f"Invalid day of the week: {day}")
diff --git a/src/scrapers/capacities_scraper.py b/src/scrapers/capacities_scraper.py
index d5ca109..b979793 100644
--- a/src/scrapers/capacities_scraper.py
+++ b/src/scrapers/capacities_scraper.py
@@ -4,6 +4,8 @@
 from datetime import datetime
 from src.database import db_session
 from src.models.capacity import Capacity
+from src.models.hourly_average_capacity import HourlyAverageCapacity
+from src.models.enums import DayOfWeekEnum
 from src.utils.constants import (
     C2C_URL,
     CRC_URL_NEW,
@@ -124,3 +126,34 @@ def get_capacity_datetime(time_str):
     format = "%m/%d/%Y %I:%M %p"
     time_obj = datetime.strptime(time_str, format)
     return time_obj
+
+
+def update_hourly_capacity(curDay, curHour):
+    print("running")
+    """
+    Update hourly average capacity every hour based on collected data.
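+
+    Here `curDay` is an uppercase day name (e.g. "MONDAY") and `curHour` an hour
+    in 24-hour format, matching the values the scheduler job in app.py passes in.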
+ """ + currentCapacities = db_session.query(Capacity).all() + + for capacity in currentCapacities: + try: + hourly_average_capacity = db_session.query(HourlyAverageCapacity).filter(HourlyAverageCapacity.facility_id == capacity.facility_id, HourlyAverageCapacity.day_of_week == DayOfWeekEnum[curDay].value, HourlyAverageCapacity.hour_of_day == curHour).first() + + if hourly_average_capacity is not None: + print("updating average") + hourly_average_capacity.update_hourly_average(capacity.percent) + else: + print("No hourly capacity, creating new entry") + hourly_average_capacity = HourlyAverageCapacity( + facility_id=capacity.facility_id, + average_percent=capacity.percent, + hour_of_day=curHour, + day_of_week=DayOfWeekEnum[curDay].value, + history=[capacity.percent] + ) + + db_session.merge(hourly_average_capacity) + db_session.commit() + + except Exception as e: + print(f"Error updating hourly average: {e}") \ No newline at end of file From 5e0e077c713840acb9d45b11999f6c3de2baca4d Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Wed, 12 Feb 2025 01:10:54 -0500 Subject: [PATCH 26/53] add migration --- app.py | 2 +- .../versions/c3a3274d78a1_new_migration.py | 45 +++++++++++++++++++ src/models/activity.py | 2 +- 3 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 migrations/versions/c3a3274d78a1_new_migration.py diff --git a/app.py b/app.py index 22598bd..6ca6c48 100644 --- a/app.py +++ b/app.py @@ -125,7 +125,7 @@ def scrape_classes(): logging.error(f"Error in scrape_classes: {e}") # Update hourly average capacity every hour - @scheduler.task("cron", id="update_capacity", minute="*") + @scheduler.task("cron", id="update_capacity", hour="*") def scheduled_job(): current_time = datetime.now() current_day = current_time.strftime("%A").upper() diff --git a/migrations/versions/c3a3274d78a1_new_migration.py b/migrations/versions/c3a3274d78a1_new_migration.py new file mode 100644 index 0000000..7120855 --- /dev/null +++ b/migrations/versions/c3a3274d78a1_new_migration.py @@ -0,0 +1,45 @@ +"""new migration + +Revision ID: c3a3274d78a1 +Revises: 24684343da0f +Create Date: 2025-02-05 18:29:56.359089 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'c3a3274d78a1' +down_revision = '24684343da0f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('gear', 'cost', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=0) + + op.create_table( + 'hourly_average_capacity', + sa.Column('id', sa.Integer(), primary_key=True), + sa.Column('facility_id', sa.Integer(), sa.ForeignKey("facility.id"), nullable=False), + sa.Column('average_percent', sa.Float(), nullable=False), + sa.Column('hour_of_day', sa.Integer(), nullable=False), + sa.Column('day_of_week', sa.Enum('Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday', + name='dayofweekenum'), nullable=True), + sa.Column('history', postgresql.ARRAY(sa.Numeric()), nullable=False, server_default='{}'), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
+    op.alter_column('gear', 'cost',
+                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
+                    nullable=False)
+
+    op.drop_table('hourly_average_capacity')
+    # ### end Alembic commands ###
diff --git a/src/models/activity.py b/src/models/activity.py
index 27e86fe..41a5c99 100644
--- a/src/models/activity.py
+++ b/src/models/activity.py
@@ -74,7 +74,7 @@ class Price(Base):

     id = Column(Integer, primary_key=True)
     activity_id = Column(Integer, ForeignKey("activity.id"), nullable=False)
-    cost = Column(Float, nullable=-False)
+    cost = Column(Float, nullable=False)
     name = Column(String, nullable=False)
     rate = Column(String)
     type = Column(Enum(PriceType), nullable=False)

From 1877f58d53bb080af01b6691170ee0021ae0533d Mon Sep 17 00:00:00 2001
From: sophiestrausberg <68089631+sophiestrausberg@users.noreply.github.com>
Date: Wed, 12 Feb 2025 01:23:34 -0500
Subject: [PATCH 27/53] Update capacities_scraper.py

---
 src/scrapers/capacities_scraper.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/scrapers/capacities_scraper.py b/src/scrapers/capacities_scraper.py
index b979793..8e58857 100644
--- a/src/scrapers/capacities_scraper.py
+++ b/src/scrapers/capacities_scraper.py
@@ -129,7 +129,6 @@ def get_capacity_datetime(time_str):


 def update_hourly_capacity(curDay, curHour):
-    print("running")
     """
     Update hourly average capacity every hour based on collected data.
     """
@@ -156,4 +155,5 @@ def update_hourly_capacity(curDay, curHour):
             db_session.commit()

         except Exception as e:
-            print(f"Error updating hourly average: {e}")
\ No newline at end of file
+            print(f"Error updating hourly average: {e}")
+

From 9067a309a7787f131e99d0d2c1375d74fb8af6e2 Mon Sep 17 00:00:00 2001
From: Kevin Biliguun
Date: Wed, 12 Feb 2025 19:54:46 -0500
Subject: [PATCH 28/53] Added user streaks including their current and max streaks

---
 schema.graphql     |  7 ++++++
 src/models/user.py |  4 ++++
 src/schema.py      | 57 ++++++++++++++++++++++++++++++++++++++++++
 src/utils/utils.py | 52 ++++++++++++++++++++++++++++++++++++
 4 files changed, 120 insertions(+)

diff --git a/schema.graphql b/schema.graphql
index d580ec9..e4dc2c8 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -137,6 +137,8 @@ type Gym {
   reports: [Report]
 }

+scalar JSONString
+
 enum MuscleGroup {
   ABDOMINALS
   CHEST
@@ -194,6 +196,8 @@ type Query {
   getWorkoutsById(id: Int): [Workout]
   activities: [Activity]
   getAllReports: [Report]
+  getWorkoutGoals(id: Int!): [String]
+  getUserStreak(id: Int!): JSONString
 }

 type Report {
@@ -221,8 +225,11 @@ type User {
   netId: String!
   name: String!
   workoutGoal: [DayOfWeekEnum]
+  activeStreak: Int
+  maxStreak: Int
   giveaways: [Giveaway]
   reports: [Report]
+  currentStreak: Int
 }

 type Workout {
diff --git a/src/models/user.py b/src/models/user.py
index a442f30..0437b1a 100644
--- a/src/models/user.py
+++ b/src/models/user.py
@@ -27,6 +27,8 @@ class User(Base):
     - `reports` The list of reports a user has submitted.
     - `name` The user's name.
     - `workout_goal` The days of the week the user has set as their personal goal.
+    - `active_streak` The number of weeks the user has met their personal goal.
+    - `max_streak` The max number of weeks the user has met their personal goal.
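+
+    Note that these columns persist week-based streaks, while the
+    `getUserStreak` query computes day-based streaks from workout history.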
""" __tablename__ = "users" @@ -38,3 +40,5 @@ class User(Base): net_id = Column(String, nullable=False) name = Column(String, nullable=False) workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) + active_streak = Column(Integer, nullable=True) + max_streak = Column(Integer, nullable=True) diff --git a/src/schema.py b/src/schema.py index c5e860d..3bee497 100644 --- a/src/schema.py +++ b/src/schema.py @@ -176,6 +176,15 @@ def resolve_pricing(self, info): class User(SQLAlchemyObjectType): class Meta: model = UserModel + + current_streak = graphene.Int(description="The user's current workout streak in days.") + max_streak = graphene.Int(description="The user's maximum workout streak.") + + def resolve_current_streak(self, info): + return self.current_streak + + def resolve_max_streak(self, info): + return self.max_streak class UserInput(graphene.InputObjectType): @@ -235,6 +244,8 @@ class Query(graphene.ObjectType): get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.") activities = graphene.List(Activity) get_all_reports = graphene.List(Report, description="Get all reports.") + get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), description="Get the workout goals of a user by ID.") + get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int(required=True), description="Get the current and max workout streak of a user.") def resolve_get_all_gyms(self, info): query = Gym.get_query(info) @@ -281,6 +292,52 @@ def resolve_get_weekly_workout_days(self, info, id): def resolve_get_all_reports(self, info): query = ReportModel.query.all() return query + + def resolve_get_workout_goals(self, info, id): + user = User.get_query(info).filter(UserModel.id == id).first() + if not user: + raise GraphQLError("User with the given ID does not exist.") + + return [day.value for day in user.workout_goal] if user.workout_goal else [] + + def resolve_get_user_streak(self, info, id): + user = User.get_query(info).filter(UserModel.id == id).first() + if not user: + raise GraphQLError("User with the given ID does not exist.") + + workouts = ( + Workout.get_query(info) + .filter(WorkoutModel.user_id == user.id) + .order_by(WorkoutModel.workout_time.desc()) + .all() + ) + + if not workouts: + return {"current_streak": 0, "max_streak": 0} + + workout_dates = {workout.workout_time.date() for workout in workouts} + sorted_dates = sorted(workout_dates, reverse=True) + + today = datetime.utcnow().date() + current_streak = 0 + max_streak = 0 + streak = 0 + prev_date = None + + for date in sorted_dates: + if prev_date and (prev_date - date).days > 1: + max_streak = max(max_streak, streak) + streak = 0 + + streak += 1 + prev_date = date + + if date == today or (date == today - timedelta(days=1) and current_streak == 0): + current_streak = streak + + max_streak = max(max_streak, streak) + + return {"current_streak": current_streak, "max_streak": max_streak} # MARK: - Mutation diff --git a/src/utils/utils.py b/src/utils/utils.py index 5acb36c..e46ab6e 100644 --- a/src/utils/utils.py +++ b/src/utils/utils.py @@ -7,6 +7,7 @@ from src.models.gym import Gym from src.models.facility import Facility, FacilityType from src.models.amenity import Amenity, AmenityType +from src.models.workout import Workout from src.utils.constants import ASSET_BASE_URL, EASTERN_TIMEZONE @@ -140,3 +141,54 @@ def get_facility_id(name): """ facility = Facility.query.filter_by(name=name).first() return facility.id + +def calculate_streaks(user, 
workouts, workout_goal): + """ + Calculate the current and maximum workout streaks for a user. + + Parameters: + - `user` The user object. + - `workouts` The user's list of completed workouts. + - `workout_goal` A list of goal days (e.g., ['Monday', 'Wednesday']). + + Returns: + - Updates `user.current_streak` and `user.max_streak`. + """ + if not workouts: + user.current_streak = 0 + user.max_streak = user.max_streak or 0 + return + + # Convert goal days to set of weekday numbers (Monday=0, Sunday=6) + goal_days = {time.strptime(day, "%A").tm_wday for day in workout_goal} + + # Filter workouts to only include those on goal days + valid_workouts = [w for w in workouts if w.workout_time.weekday() in goal_days] + + # Sort by workout date + valid_workouts.sort(key=lambda x: x.workout_time) + + current_streak = 1 + max_streak = user.max_streak or 0 + + for i in range(1, len(valid_workouts)): + prev_day = valid_workouts[i - 1].workout_time + curr_day = valid_workouts[i].workout_time + + # Find the next expected goal day + expected_next_day = prev_day + timedelta(days=1) + while expected_next_day.weekday() not in goal_days: + expected_next_day += timedelta(days=1) + + # Check if current workout is on the expected next goal day + if curr_day.date() == expected_next_day.date(): + current_streak += 1 + else: + max_streak = max(max_streak, current_streak) + current_streak = 1 + + # Final update + max_streak = max(max_streak, current_streak) + user.current_streak = current_streak + user.max_streak = max_streak + From c94458300c53af105cd56af025bc54c0e71e87af Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Thu, 13 Feb 2025 09:19:42 -0500 Subject: [PATCH 29/53] Removed users from reports - Removed users in reports model and schema - Added migration to remove user and reports connection - Changed one gym equipment category from misc --- .../30d67980c5af_remove_user_from_reports.py | 36 +++++++++++++++++++ schema.graphql | 5 +-- src/models/report.py | 3 -- src/models/user.py | 2 -- src/schema.py | 14 ++------ src/utils/equipment_labels.json | 2 +- 6 files changed, 40 insertions(+), 22 deletions(-) create mode 100644 migrations/versions/30d67980c5af_remove_user_from_reports.py diff --git a/migrations/versions/30d67980c5af_remove_user_from_reports.py b/migrations/versions/30d67980c5af_remove_user_from_reports.py new file mode 100644 index 0000000..41fb249 --- /dev/null +++ b/migrations/versions/30d67980c5af_remove_user_from_reports.py @@ -0,0 +1,36 @@ +"""Remove user from reports + +Revision ID: 30d67980c5af +Revises: 24684343da0f +Create Date: 2025-02-13 09:07:17.012872 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '30d67980c5af' +down_revision = '24684343da0f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('gear', 'cost', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=0) + op.drop_constraint('report_user_id_fkey', 'report', type_='foreignkey') + op.drop_column('report', 'user_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('report', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) + op.create_foreign_key('report_user_id_fkey', 'report', 'users', ['user_id'], ['id']) + op.alter_column('gear', 'cost', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=False) + # ### end Alembic commands ### diff --git a/schema.graphql b/schema.graphql index d580ec9..5636e16 100644 --- a/schema.graphql +++ b/schema.graphql @@ -158,7 +158,7 @@ type Mutation { enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(userId: Int!, workoutTime: DateTime!): Workout - createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!, userId: Int!): CreateReport + createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport } type OpenHours { @@ -202,9 +202,7 @@ type Report { description: String! gymId: Int! issue: ReportType! - userId: Int! gym: Gym - user: User } enum ReportType { @@ -222,7 +220,6 @@ type User { name: String! workoutGoal: [DayOfWeekEnum] giveaways: [Giveaway] - reports: [Report] } type Workout { diff --git a/src/models/report.py b/src/models/report.py index f7a7dfa..da8f84d 100644 --- a/src/models/report.py +++ b/src/models/report.py @@ -16,7 +16,6 @@ class Report(Base): Attributes: - `id` The ID of the report. - - `user_id` The ID of the user who created the report. - `issue` The issue reported (discrete options). - `description` The description of the report. - `created_at` The date and time the report was created. @@ -30,7 +29,5 @@ class Report(Base): description = Column(String, nullable=False) # Text input gym_id = Column(Integer, ForeignKey("gym.id"), nullable=False) # One to many relationship with gym issue = Column(Enum(ReportType), nullable=False) # Discrete options (enumerate) - user_id = Column(Integer, ForeignKey("users.id"), nullable=False) # Make relationship with gym and user gym = relationship("Gym", back_populates="reports") - user = relationship("User", back_populates="reports") diff --git a/src/models/user.py b/src/models/user.py index a442f30..45507e7 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -24,7 +24,6 @@ class User(Base): - `email` The user's email address. - `giveaways` (nullable) The list of giveaways a user is entered into. - `net_id` The user's Net ID. - - `reports` The list of reports a user has submitted. - `name` The user's name. - `workout_goal` The days of the week the user has set as their personal goal. 
""" @@ -34,7 +33,6 @@ class User(Base): id = Column(Integer, primary_key=True) email = Column(String, nullable=True) giveaways = relationship("Giveaway", secondary="giveaway_instance", back_populates="users") - reports = relationship("Report", back_populates="user") net_id = Column(String, nullable=False) name = Column(String, nullable=False) workout_goal = Column(ARRAY(SQLAEnum(DayOfWeekEnum)), nullable=True) diff --git a/src/schema.py b/src/schema.py index c5e860d..d828e8e 100644 --- a/src/schema.py +++ b/src/schema.py @@ -213,16 +213,11 @@ class Meta: model = ReportModel gym = graphene.Field(lambda: Gym) - user = graphene.Field(lambda: User) def resolve_gym(self, info): query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() return query - def resolve_user(self, info): - query = User.get_query(info).filter(UserModel.id == self.user_id).first() - return query - # MARK: - Query @@ -408,7 +403,6 @@ def mutate(self, info, workout_time, user_id): class CreateReport(graphene.Mutation): class Arguments: - user_id = graphene.Int(required=True) issue = graphene.String(required=True) description = graphene.String(required=True) created_at = graphene.DateTime(required=True) @@ -416,11 +410,7 @@ class Arguments: report = graphene.Field(Report) - def mutate(self, info, description, user_id, issue, created_at, gym_id): - # Check if user exists - user = User.get_query(info).filter(UserModel.id == user_id).first() - if not user: - raise GraphQLError("User with given ID does not exist.") + def mutate(self, info, description, issue, created_at, gym_id): # Check if gym exists gym = Gym.get_query(info).filter(GymModel.id == gym_id).first() if not gym: @@ -428,7 +418,7 @@ def mutate(self, info, description, user_id, issue, created_at, gym_id): # Check if issue is a valid enumeration if issue not in ["INACCURATE_EQUIPMENT", "INCORRECT_HOURS", "INACCURATE_DESCRIPTION", "WAIT_TIMES_NOT_UPDATED", "OTHER"]: raise GraphQLError("Issue is not a valid enumeration.") - report = ReportModel(description=description, user_id=user_id, issue=issue, + report = ReportModel(description=description, issue=issue, created_at=created_at, gym_id=gym_id) db_session.add(report) db_session.commit() diff --git a/src/utils/equipment_labels.json b/src/utils/equipment_labels.json index 7f3d3bd..4943a3a 100644 --- a/src/utils/equipment_labels.json +++ b/src/utils/equipment_labels.json @@ -289,7 +289,7 @@ }, "Precor Hi/Lo Pulley": { "clean_name": "Hi/Lo Pulley", - "label": ["MISCELLANEOUS"] + "label": ["TRICEPS", "BICEPS", "SHOULDERS", "CHEST"] }, "Precor Leg Press": { "clean_name": "Leg Press", From dcfeb010b5dc4ce0a685aaf5c1a75c40b71b527e Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Sat, 15 Feb 2025 01:36:23 -0500 Subject: [PATCH 30/53] add error handling to update_hourly_capacity call --- app.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/app.py b/app.py index 6ca6c48..ae19fda 100644 --- a/app.py +++ b/app.py @@ -16,7 +16,7 @@ # Check if we're in migration mode with error handling try: - FLASK_MIGRATE = os.getenv('FLASK_MIGRATE', 'false').lower() == 'true' + FLASK_MIGRATE = os.getenv("FLASK_MIGRATE", "false").lower() == "true" except Exception as e: logging.warning(f"Error reading FLASK_MIGRATE environment variable: {e}. Defaulting to false.") FLASK_MIGRATE = False @@ -45,16 +45,16 @@ # Verify all required variables are present if not all([db_user, db_password, db_name, db_host, db_port]): raise ValueError( - "Missing required database configuration. 
" - "Please ensure all database environment variables are set." + "Missing required database configuration. " "Please ensure all database environment variables are set." ) -app.config['SQLALCHEMY_DATABASE_URI'] = db_url -app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False +app.config["SQLALCHEMY_DATABASE_URI"] = db_url +app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False migrate = Migrate(app, db) schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) + def should_run_initial_scrape(): """ Check if we should run initial scraping: @@ -65,10 +65,11 @@ def should_run_initial_scrape(): if FLASK_MIGRATE: return False # Check if we're in the main process - werkzeug_var = os.environ.get('WERKZEUG_RUN_MAIN') + werkzeug_var = os.environ.get("WERKZEUG_RUN_MAIN") # Logic: if in local, then werkzeug_var exists: so only run when true to prevent double running # If in Gunicorn, then werkzeug_var is None, so then it will also run - return werkzeug_var is None or werkzeug_var == 'true' + return werkzeug_var is None or werkzeug_var == "true" + # Initialize scheduler only if not in migration mode if not FLASK_MIGRATE: @@ -80,16 +81,20 @@ def should_run_initial_scrape(): # Logging logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S") + @app.route("/") def index(): return render_template("index.html") + app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True)) + @app.teardown_appcontext def shutdown_session(exception=None): db_session.remove() + # Only define scheduler tasks if not in migration mode if not FLASK_MIGRATE: # Scrape hours every 15 minutes @@ -131,8 +136,12 @@ def scheduled_job(): current_day = current_time.strftime("%A").upper() current_hour = current_time.hour - logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...") - update_hourly_capacity(current_day, current_hour) + try: + logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...") + update_hourly_capacity(current_day, current_hour) + except Exception as e: + logging.error(f"Error updating hourly average capacity for {current_day}, hour {current_hour}: {e}") + # Create database init_db() From 4dc638ba3b5d07e10afca79b03c7403494a68899 Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Sat, 15 Feb 2025 16:45:00 -0500 Subject: [PATCH 31/53] Added a couple lines to account for special hours for when facilities are closed --- src/scrapers/sp_hours_scraper.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/scrapers/sp_hours_scraper.py b/src/scrapers/sp_hours_scraper.py index 2ed147f..01e2af0 100644 --- a/src/scrapers/sp_hours_scraper.py +++ b/src/scrapers/sp_hours_scraper.py @@ -49,6 +49,8 @@ def fetch_sp_facility(): clean_hours(date, get_facility_id(name)) if hours != MARKER_CLOSED: parse_special_hours(hours, type, date, get_facility_id(name)) + else: + add_special_facility_hours(date, date, get_facility_id(name)) # MARK: Helpers @@ -100,6 +102,8 @@ def add_special_facility_hours(start_time, end_time, facility_id, court_type=Non # Convert datetime objects to Unix start_unix = unix_time(start_time) end_unix = unix_time(end_time) + + print(f"Adding special hours: start_unix={start_unix}, end_unix={end_unix}, facility_id={facility_id}, is_special=True") # Create hours hrs = OpenHours( @@ -115,3 +119,4 @@ def add_special_facility_hours(start_time, end_time, facility_id, court_type=Non # Add to database db_session.merge(hrs) db_session.commit() + 
print(f"Committed special hours for facility_id={facility_id}") From c881c97a8c8c82a19922604799ad920a4eeb7ecd Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Tue, 25 Feb 2025 23:16:36 -0500 Subject: [PATCH 32/53] Refactored app running and migrations - Created app factory to handle migrations and scrapers - Moved migrations running from workflow file to dockerfile - Created migrations.py to run migrations - Changed app.py to call app factory --- .github/workflows/deploy-dev.yml | 14 --- Dockerfile | 3 +- app.py | 168 +------------------------------ app_factory.py | 146 +++++++++++++++++++++++++++ migrations.py | 4 + 5 files changed, 154 insertions(+), 181 deletions(-) create mode 100644 app_factory.py create mode 100644 migrations.py diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml index a5fb051..da074eb 100644 --- a/.github/workflows/deploy-dev.yml +++ b/.github/workflows/deploy-dev.yml @@ -46,18 +46,4 @@ jobs: sudo systemctl restart nginx docker stack deploy -c docker-compose.yml the-stack --with-registry-auth sleep 60s - docker exec $(docker ps -q -f name=the-stack_web) psql $DB_URL -c "DELETE FROM alembic_version;" - docker exec $(docker ps -q -f name=the-stack_web) /bin/sh -c "FLASK_MIGRATE=true flask db stamp head" - attempt=1 - max_attempts=3 - until docker exec $(docker ps -q -f name=the-stack_web) /bin/sh -c "FLASK_MIGRATE=true flask db upgrade" || [ $attempt -eq $max_attempts ] - do - echo "Migration attempt $attempt failed. Retrying..." - sleep 15 - attempt=$((attempt + 1)) - done - if [ $attempt -eq $max_attempts ]; then - echo "Migration failed after $max_attempts attempts" - exit 1 - fi yes | docker system prune -a diff --git a/Dockerfile b/Dockerfile index 32b14d2..30de6de 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,9 @@ FROM python:3.9 ENV TZ="America/New_York" -ENV FLASK_MIGRATE=false RUN mkdir -p /usr/src/app WORKDIR /usr/src/app COPY . . ENV MAX_CONCURRENT_PIP=4 RUN pip3 install --upgrade pip RUN pip3 install --exists-action w -r requirements.txt -CMD python3 app.py +CMD flask --app migrations db upgrade && python3 app.py \ No newline at end of file diff --git a/app.py b/app.py index ae19fda..58d56ea 100644 --- a/app.py +++ b/app.py @@ -1,169 +1,7 @@ -import logging -import os -import sentry_sdk -from datetime import datetime -from flask import Flask, render_template -from graphene import Schema -from graphql.utils import schema_printer -from src.database import db_session, init_db -from src.database import Base as db -from src.database import db_url, db_user, db_password, db_name, db_host, db_port -from src.models.openhours import OpenHours -from flask_migrate import Migrate -from src.schema import Query, Mutation -from flasgger import Swagger -from flask_graphql import GraphQLView +from app_factory import create_app -# Check if we're in migration mode with error handling -try: - FLASK_MIGRATE = os.getenv("FLASK_MIGRATE", "false").lower() == "true" -except Exception as e: - logging.warning(f"Error reading FLASK_MIGRATE environment variable: {e}. 
Defaulting to false.") - FLASK_MIGRATE = False - -# Only import scraping-related modules if not in migration mode -if not FLASK_MIGRATE: - from flask_apscheduler import APScheduler - from src.scrapers.capacities_scraper import fetch_capacities, update_hourly_capacity - from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility - from src.scrapers.scraper_helpers import clean_past_hours - from src.scrapers.sp_hours_scraper import fetch_sp_facility - from src.scrapers.equipment_scraper import scrape_equipment - from src.scrapers.class_scraper import fetch_classes - from src.scrapers.activities_scraper import fetch_activity - from src.utils.utils import create_gym_table - -sentry_sdk.init( - dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560", - traces_sample_rate=1.0, - profiles_sample_rate=1.0, -) - -app = Flask(__name__) -app.debug = True - -# Verify all required variables are present -if not all([db_user, db_password, db_name, db_host, db_port]): - raise ValueError( - "Missing required database configuration. " "Please ensure all database environment variables are set." - ) - -app.config["SQLALCHEMY_DATABASE_URI"] = db_url -app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False -migrate = Migrate(app, db) -schema = Schema(query=Query, mutation=Mutation) -swagger = Swagger(app) - - -def should_run_initial_scrape(): - """ - Check if we should run initial scraping: - - Not in migration mode - - Only in the main process (Werkzeug or Gunicorn) - """ - # If in migration mode, don't run initial scraping - if FLASK_MIGRATE: - return False - # Check if we're in the main process - werkzeug_var = os.environ.get("WERKZEUG_RUN_MAIN") - # Logic: if in local, then werkzeug_var exists: so only run when true to prevent double running - # If in Gunicorn, then werkzeug_var is None, so then it will also run - return werkzeug_var is None or werkzeug_var == "true" - - -# Initialize scheduler only if not in migration mode -if not FLASK_MIGRATE: - scheduler = APScheduler() - if should_run_initial_scrape(): # Only start scheduler in main process - scheduler.init_app(app) - scheduler.start() - -# Logging -logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S") - - -@app.route("/") -def index(): - return render_template("index.html") - - -app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True)) - - -@app.teardown_appcontext -def shutdown_session(exception=None): - db_session.remove() - - -# Only define scheduler tasks if not in migration mode -if not FLASK_MIGRATE: - # Scrape hours every 15 minutes - @scheduler.task("interval", id="scrape_hours", seconds=900) - def scrape_hours(): - try: - logging.info("Scraping hours from sheets...") - # Clear hours - db_session.query(OpenHours).delete() - fetch_reg_facility() - fetch_reg_building() - fetch_sp_facility() - clean_past_hours() - except Exception as e: - logging.error(f"Error in scrape_hours: {e}") - - # Scrape capacities every 10 minutes - @scheduler.task("interval", id="scrape_capacities", seconds=600) - def scrape_capacities(): - try: - logging.info("Scraping capacities from C2C...") - fetch_capacities() - except Exception as e: - logging.error(f"Error in scrape_capacities: {e}") - - # Scrape classes every hour - @scheduler.task("interval", id="scrape_classes", seconds=3600) - def scrape_classes(): - try: - logging.info("Scraping classes from group-fitness-classes...") - fetch_classes(10) 
-        except Exception as e:
-            logging.error(f"Error in scrape_classes: {e}")
-
-    # Update hourly average capacity every hour
-    @scheduler.task("cron", id="update_capacity", hour="*")
-    def scheduled_job():
-        current_time = datetime.now()
-        current_day = current_time.strftime("%A").upper()
-        current_hour = current_time.hour
-
-        try:
-            logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...")
-            update_hourly_capacity(current_day, current_hour)
-        except Exception as e:
-            logging.error(f"Error updating hourly average capacity for {current_day}, hour {current_hour}: {e}")
-
-
-# Create database
-init_db()
-
-# Run initial scraping only in main process and not in migration mode
-if should_run_initial_scrape():
-    logging.info("Running initial scraping...")
-    try:
-        create_gym_table()
-        scrape_classes()
-        scrape_hours()
-        scrape_capacities()
-        scrape_equipment()
-        logging.info("Scraping activities from sheets...")
-        fetch_activity()
-    except Exception as e:
-        logging.error(f"Error during initial scraping: {e}")
-
-# Create schema.graphql
-with open("schema.graphql", "w+") as schema_file:
-    schema_file.write(schema_printer.print_schema(schema))
-    schema_file.close()
+# Create Flask app with scrapers enabled
+app = create_app(run_migrations=False)
 
 if __name__ == "__main__":
     app.run(host="127.0.0.1", port=5000)
diff --git a/app_factory.py b/app_factory.py
new file mode 100644
index 0000000..b9058d6
--- /dev/null
+++ b/app_factory.py
@@ -0,0 +1,146 @@
+import logging
+from flask import Flask, render_template
+from graphene import Schema
+from graphql.utils import schema_printer
+from src.database import db_session, init_db
+from src.database import Base as db
+from src.database import db_url, db_user, db_password, db_name, db_host, db_port
+from flask_migrate import Migrate
+from src.schema import Query, Mutation
+from flasgger import Swagger
+from flask_graphql import GraphQLView
+import sentry_sdk
+
+def create_app(run_migrations=False):
+    """
+    Application factory for Flask app.
+
+    Args:
+        run_migrations: If True, configure app for migrations only (no scrapers)
+
+    Returns:
+        Configured Flask application
+    """
+    # Initialize Sentry
+    sentry_sdk.init(
+        dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
+        traces_sample_rate=1.0,
+        profiles_sample_rate=1.0,
+    )
+
+    # Create and configure Flask app
+    app = Flask(__name__)
+    app.debug = True
+
+    # Verify all required database variables are present
+    if not all([db_user, db_password, db_name, db_host, db_port]):
+        raise ValueError(
+            "Missing required database configuration. "
+            "Please ensure all database environment variables are set."
+        )
+
+    # Configure database
+    app.config['SQLALCHEMY_DATABASE_URI'] = db_url
+    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+
+    # Set up extensions
+    migrate = Migrate(app, db)
+    schema = Schema(query=Query, mutation=Mutation)
+    swagger = Swagger(app)
+
+    # Configure routes
+    @app.route("/")
+    def index():
+        return render_template("index.html")
+
+    app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True))
+
+    @app.teardown_appcontext
+    def shutdown_session(exception=None):
+        db_session.remove()
+
+    # Initialize database
+    init_db()
+
+    # Create schema.graphql
+    with open("schema.graphql", "w+") as schema_file:
+        schema_file.write(schema_printer.print_schema(schema))
+        schema_file.close()
+
+    # Configure and run scrapers if not in migration mode
+    if not run_migrations:
+        setup_scrapers(app)
+
+    return app
+
+def setup_scrapers(app):
+    """Set up scrapers and scheduled tasks"""
+    # Import scraper-related modules only when needed
+    from flask_apscheduler import APScheduler
+    from src.scrapers.capacities_scraper import fetch_capacities
+    from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
+    from src.scrapers.scraper_helpers import clean_past_hours
+    from src.scrapers.sp_hours_scraper import fetch_sp_facility
+    from src.scrapers.equipment_scraper import scrape_equipment
+    from src.scrapers.class_scraper import fetch_classes
+    from src.scrapers.activities_scraper import fetch_activity
+    from src.utils.utils import create_gym_table
+    from src.models.openhours import OpenHours
+    import os
+
+    # Set up logging
+    logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s",
+                        level=logging.INFO,
+                        datefmt="%Y-%m-%d %H:%M:%S")
+
+    # Initialize scheduler
+    scheduler = APScheduler()
+
+    # Scrape hours every 15 minutes
+    @scheduler.task("interval", id="scrape_hours", seconds=900)
+    def scrape_hours():
+        try:
+            logging.info("Scraping hours from sheets...")
+            # Clear hours
+            db_session.query(OpenHours).delete()
+            fetch_reg_facility()
+            fetch_reg_building()
+            fetch_sp_facility()
+            clean_past_hours()
+        except Exception as e:
+            logging.error(f"Error in scrape_hours: {e}")
+
+    # Scrape capacities every 10 minutes
+    @scheduler.task("interval", id="scrape_capacities", seconds=600)
+    def scrape_capacities():
+        try:
+            logging.info("Scraping capacities from C2C...")
+            fetch_capacities()
+        except Exception as e:
+            logging.error(f"Error in scrape_capacities: {e}")
+
+    # Scrape classes every hour
+    @scheduler.task("interval", id="scrape_classes", seconds=3600)
+    def scrape_classes():
+        try:
+            logging.info("Scraping classes from group-fitness-classes...")
+            fetch_classes(10)
+        except Exception as e:
+            logging.error(f"Error in scrape_classes: {e}")
+
+    # Initialize scheduler
+    scheduler.init_app(app)
+    scheduler.start()
+
+    # Run initial scraping
+    logging.info("Running initial scraping...")
+    try:
+        create_gym_table()
+        scrape_classes()
+        scrape_hours()
+        scrape_capacities()
+        scrape_equipment()
+        logging.info("Scraping activities from sheets...")
+        fetch_activity()
+    except Exception as e:
+        logging.error(f"Error during initial scraping: {e}")
diff --git a/migrations.py b/migrations.py
new file mode 100644
index 0000000..5bf1f0a
--- /dev/null
+++ b/migrations.py
@@ -0,0 +1,4 @@
+from app_factory import create_app
+
+# Create Flask app for migrations only (no scrapers)
+app = create_app(run_migrations=True)

From 5b364aba1aec12839d4a615aa49905f60cc3bac0 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Tue, 25 Feb 2025 23:43:46 -0500
Subject: [PATCH 33/53] Initialized Migrations

- Initialized migrations folder
- Added logging similar to old scraper to app factory
---
 app_factory.py                                |  79 ++++++++++--
 migrations/env.py                             |   2 +-
 ...a0f_update_equipment_table_with_muscle_.py | 122 ------------------
 .../30d67980c5af_remove_user_from_reports.py  |  36 ------
 .../versions/c3a3274d78a1_new_migration.py    |  45 -------
 5 files changed, 70 insertions(+), 214 deletions(-)
 delete mode 100644 migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
 delete mode 100644 migrations/versions/30d67980c5af_remove_user_from_reports.py
 delete mode 100644 migrations/versions/c3a3274d78a1_new_migration.py

diff --git a/app_factory.py b/app_factory.py
index b9058d6..d4752ec 100644
--- a/app_factory.py
+++ b/app_factory.py
@@ -11,6 +11,12 @@
 from flask_graphql import GraphQLView
 import sentry_sdk
 
+# Set up logging at module level
+logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s",
+                    level=logging.INFO,
+                    datefmt="%Y-%m-%d %H:%M:%S")
+logger = logging.getLogger(__name__)
+
 def create_app(run_migrations=False):
     """
     Application factory for Flask app.
@@ -21,7 +27,10 @@ def create_app(run_migrations=False):
     Returns:
         Configured Flask application
     """
+    logger.info("Initializing application")
+
     # Initialize Sentry
+    logger.info("Configuring Sentry")
     sentry_sdk.init(
         dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
         traces_sample_rate=1.0,
@@ -31,24 +40,30 @@ def create_app(run_migrations=False):
     # Create and configure Flask app
     app = Flask(__name__)
     app.debug = True
+    logger.info("Flask app created with debug=%s", app.debug)
 
     # Verify all required database variables are present
     if not all([db_user, db_password, db_name, db_host, db_port]):
+        logger.error("Missing required database configuration variables")
         raise ValueError(
             "Missing required database configuration. "
             "Please ensure all database environment variables are set."
         )
 
     # Configure database
+    logger.info("Configuring database connection to %s:%s/%s", db_host, db_port, db_name)
     app.config['SQLALCHEMY_DATABASE_URI'] = db_url
     app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
 
     # Set up extensions
+    logger.info("Setting up Flask extensions")
     migrate = Migrate(app, db)
     schema = Schema(query=Query, mutation=Mutation)
     swagger = Swagger(app)
 
     # Configure routes
+    logger.info("Configuring routes")
+
     @app.route("/")
     def index():
         return render_template("index.html")
@@ -60,17 +75,23 @@ def shutdown_session(exception=None):
         db_session.remove()
 
     # Initialize database
+    logger.info("Initializing database")
     init_db()
 
     # Create schema.graphql
+    logger.info("Generating GraphQL schema file")
     with open("schema.graphql", "w+") as schema_file:
         schema_file.write(schema_printer.print_schema(schema))
         schema_file.close()
 
     # Configure and run scrapers if not in migration mode
     if not run_migrations:
+        logger.info("Setting up scrapers and scheduled tasks")
         setup_scrapers(app)
+    else:
+        logger.info("Running in migration mode - scrapers disabled")
 
+    logger.info("Application initialization complete")
     return app
 
 def setup_scrapers(app):
@@ -88,17 +109,19 @@ def setup_scrapers(app):
     from src.models.openhours import OpenHours
     import os
 
-    # Set up logging
-    logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s",
-                        level=logging.INFO,
-                        datefmt="%Y-%m-%d %H:%M:%S")
+    logger = logging.getLogger(__name__)
+    logger.info("Beginning scraper configuration")
 
     # Initialize scheduler
     scheduler = APScheduler()
+    logger.info("APScheduler initialized")
 
     # Scrape hours every 15 minutes
     @scheduler.task("interval", id="scrape_hours", seconds=900)
     def scrape_hours():
+        job = scheduler.get_job('scrape_hours')
+        next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown"
+        logging.info("Running job \"scrape_hours (trigger: interval[0:15:00], next run at: %s EST)\"", next_run)
         try:
             logging.info("Scraping hours from sheets...")
             # Clear hours
@@ -107,40 +130,76 @@ def scrape_hours():
             fetch_reg_building()
             fetch_sp_facility()
             clean_past_hours()
+            logging.info(
+                "Job \"scrape_hours (trigger: interval[0:15:00], next run at: %s EST)\" executed successfully", next_run)
         except Exception as e:
             logging.error(f"Error in scrape_hours: {e}")
 
     # Scrape capacities every 10 minutes
     @scheduler.task("interval", id="scrape_capacities", seconds=600)
     def scrape_capacities():
+        job = scheduler.get_job('scrape_capacities')
+        next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown"
+        logging.info("Running job \"scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)\"", next_run)
         try:
             logging.info("Scraping capacities from C2C...")
             fetch_capacities()
+            logging.info(
+                "Job \"scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)\" executed successfully", next_run)
         except Exception as e:
             logging.error(f"Error in scrape_capacities: {e}")
 
     # Scrape classes every hour
     @scheduler.task("interval", id="scrape_classes", seconds=3600)
     def scrape_classes():
+        job = scheduler.get_job('scrape_classes')
+        next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown"
+        logging.info("Running job \"scrape_classes (trigger: interval[1:00:00], next run at: %s EST)\"", next_run)
         try:
             logging.info("Scraping classes from group-fitness-classes...")
             fetch_classes(10)
+            logging.info(
+                "Job \"scrape_classes (trigger: interval[1:00:00], next run at: %s EST)\" executed successfully", next_run)
         except Exception as e:
             logging.error(f"Error in scrape_classes: {e}")
 
+    # We're now handling job execution logging within each task function
+
     # Initialize scheduler
+    logger.info("Starting scheduler")
     scheduler.init_app(app)
     scheduler.start()
 
     # Run initial scraping
-    logging.info("Running initial scraping...")
+    logger.info("Running initial scraping...")
     try:
         create_gym_table()
-        scrape_classes()
-        scrape_hours()
-        scrape_capacities()
+        logger.info("Gym table created")
+
+        logger.info("Scraping classes from group-fitness-classes...")
+        fetch_classes(10)
+        logger.info("Initial class scraping complete")
+
+        logger.info("Scraping hours from sheets...")
+        db_session.query(OpenHours).delete()
+        fetch_reg_facility()
+        fetch_reg_building()
+        fetch_sp_facility()
+        clean_past_hours()
+        logger.info("Initial hours scraping complete")
+
+        logger.info("Scraping capacities from C2C...")
+        fetch_capacities()
+        logger.info("Initial capacities scraping complete")
+
+        logger.info("Scraping equipment...")
         scrape_equipment()
-        logging.info("Scraping activities from sheets...")
+        logger.info("Initial equipment scraping complete")
+
+        logger.info("Scraping activities from sheets...")
         fetch_activity()
+        logger.info("Initial activities scraping complete")
+
+        logger.info("All initial scraping completed successfully")
     except Exception as e:
-        logging.error(f"Error during initial scraping: {e}")
+        logger.error(f"Error during initial scraping: {e}")
diff --git a/migrations/env.py b/migrations/env.py
index 6b3bfa6..169d487 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -1,5 +1,4 @@
 from __future__ import with_statement
-from flask import current_app
 
 import logging
 from logging.config import fileConfig
@@ -22,6 +21,7 @@
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
+from flask import current_app
 config.set_main_option('sqlalchemy.url',
                        current_app.config.get('SQLALCHEMY_DATABASE_URI'))
 target_metadata = current_app.extensions['migrate'].db.metadata
diff --git a/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py b/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
deleted file mode 100644
index bf9d7f2..0000000
--- a/migrations/versions/24684343da0f_update_equipment_table_with_muscle_.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""update equipment table with muscle groups
-
-Revision ID: 24684343da0f
-Revises:
-Create Date: 2024-11-20 17:40:32.344965
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-from sqlalchemy import Enum
-from enum import Enum as PyEnum
-
-# revision identifiers, used by Alembic.
-revision = '24684343da0f'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-class MuscleGroup(PyEnum):
-    ABDOMINALS = 1
-    CHEST = 2
-    BACK = 3
-    SHOULDERS = 4
-    BICEPS = 5
-    TRICEPS = 6
-    HAMSTRINGS = 7
-    QUADS = 8
-    GLUTES = 9
-    CALVES = 10
-    MISCELLANEOUS = 11
-    CARDIO = 12
-
-def upgrade():
-    # Create new muscle_group enum type
-    muscle_group_enum = postgresql.ENUM(
-        'ABDOMINALS', 'CHEST', 'BACK', 'SHOULDERS', 'BICEPS', 'TRICEPS',
-        'HAMSTRINGS', 'QUADS', 'GLUTES', 'CALVES', 'MISCELLANEOUS', 'CARDIO',
-        name='musclegroup'
-    )
-    muscle_group_enum.create(op.get_bind())
-
-    # Add new columns first
-    op.add_column('equipment', sa.Column('clean_name', sa.String(), nullable=True))
-    op.add_column('equipment',
-        sa.Column('muscle_groups', postgresql.ARRAY(muscle_group_enum), nullable=True)
-    )
-
-    # Update data: Set clean_name equal to name initially
-    op.execute('UPDATE equipment SET clean_name = name')
-
-    # Convert equipment_type to muscle_groups based on mapping
-    op.execute("""
-        UPDATE equipment SET muscle_groups = CASE
-            WHEN equipment_type = 'cardio' THEN ARRAY['CARDIO']::musclegroup[]
-            WHEN equipment_type = 'racks_and_benches' THEN ARRAY['CHEST', 'BACK', 'SHOULDERS']::musclegroup[]
-            WHEN equipment_type = 'selectorized' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
-            WHEN equipment_type = 'multi_cable' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
-            WHEN equipment_type = 'free_weights' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
-            WHEN equipment_type = 'plate_loaded' THEN ARRAY['MISCELLANEOUS']::musclegroup[]
-            ELSE ARRAY['MISCELLANEOUS']::musclegroup[]
-        END
-    """)
-
-    # Make clean_name not nullable after updating data
-    op.alter_column('equipment', 'clean_name',
-                    existing_type=sa.String(),
-                    nullable=False)
-
-    # Make muscle_groups not nullable after data migration
-    op.alter_column('equipment', 'muscle_groups',
-                    existing_type=postgresql.ARRAY(muscle_group_enum),
-                    nullable=False)
-
-    # Drop the old equipment_type column and enum
-    op.drop_column('equipment', 'equipment_type')
-    op.execute('DROP TYPE equipmenttype')
-
-def downgrade():
-    # Create old equipment_type enum
-    op.execute("""
-        CREATE TYPE equipmenttype AS ENUM (
-            'cardio',
-            'racks_and_benches',
-            'selectorized',
-            'multi_cable',
-            'free_weights',
-            'miscellaneous',
-            'plate_loaded'
-        )
-    """)
-
-    # Add equipment_type column
-    op.add_column('equipment',
-        sa.Column('equipment_type', postgresql.ENUM('cardio', 'racks_and_benches', 'selectorized',
-                                                    'multi_cable', 'free_weights', 'miscellaneous',
-                                                    'plate_loaded', name='equipmenttype'),
-                  nullable=True))
-
-    # Convert muscle_groups back to equipment_type
-    op.execute("""
-        UPDATE equipment SET equipment_type = CASE
-            WHEN 'CARDIO' = ANY(muscle_groups) THEN 'cardio'::equipmenttype
-            WHEN 'CHEST' = ANY(muscle_groups) OR 'BACK' = ANY(muscle_groups) OR 'SHOULDERS' = ANY(muscle_groups)
-                THEN 'racks_and_benches'::equipmenttype
-            ELSE 'miscellaneous'::equipmenttype
-        END
-    """)
-
-    # Make equipment_type not nullable
-    op.alter_column('equipment', 'equipment_type',
-                    existing_type=postgresql.ENUM('cardio', 'racks_and_benches', 'selectorized',
-                                                  'multi_cable', 'free_weights', 'miscellaneous',
-                                                  'plate_loaded', name='equipmenttype'),
-                    nullable=False)
-
-    # Drop new columns
-    op.drop_column('equipment', 'muscle_groups')
-    op.drop_column('equipment', 'clean_name')
-
-    # Drop muscle_group enum
-    op.execute('DROP TYPE musclegroup')
diff --git a/migrations/versions/30d67980c5af_remove_user_from_reports.py b/migrations/versions/30d67980c5af_remove_user_from_reports.py
deleted file mode 100644
index 41fb249..0000000
--- a/migrations/versions/30d67980c5af_remove_user_from_reports.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Remove user from reports
-
-Revision ID: 30d67980c5af
-Revises: 24684343da0f
-Create Date: 2025-02-13 09:07:17.012872
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = '30d67980c5af'
-down_revision = '24684343da0f'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-    op.drop_constraint('report_user_id_fkey', 'report', type_='foreignkey')
-    op.drop_column('report', 'user_id')
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('report', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False))
-    op.create_foreign_key('report_user_id_fkey', 'report', 'users', ['user_id'], ['id'])
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-    # ### end Alembic commands ###
diff --git a/migrations/versions/c3a3274d78a1_new_migration.py b/migrations/versions/c3a3274d78a1_new_migration.py
deleted file mode 100644
index 7120855..0000000
--- a/migrations/versions/c3a3274d78a1_new_migration.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""new migration
-
-Revision ID: c3a3274d78a1
-Revises: 24684343da0f
-Create Date: 2025-02-05 18:29:56.359089
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = 'c3a3274d78a1'
-down_revision = '24684343da0f'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-
-    op.create_table(
-        'hourly_average_capacity',
-        sa.Column('id', sa.Integer(), primary_key=True),
-        sa.Column('facility_id', sa.Integer(), sa.ForeignKey("facility.id"), nullable=False),
-        sa.Column('average_percent', sa.Float(), nullable=False),
-        sa.Column('hour_of_day', sa.Integer(), nullable=False),
-        sa.Column('day_of_week', sa.Enum('Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday',
-                                         name='dayofweekenum'), nullable=True),
-        sa.Column('history', postgresql.ARRAY(sa.Numeric()), nullable=False, server_default='{}'),
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('gear', 'cost',
-                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
-                    nullable=False)
-
-    op.drop_table('hourly_average_capacity')
-    # ### end Alembic commands ###

From d207ab5a54ece2010e59c6ba6d91e0bb7b84aac9 Mon Sep 17 00:00:00 2001
From: Joshua Dirga
Date: Wed, 26 Feb 2025 17:31:10 -0500
Subject: [PATCH 34/53] Moved sentry initialization to app

---
 app.py         | 9 +++++++++
 app_factory.py | 9 ---------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/app.py b/app.py
index 58d56ea..0dd4595 100644
--- a/app.py
+++ b/app.py
@@ -1,4 +1,13 @@
 from app_factory import create_app
+import sentry_sdk
+
+
+# Initialize Sentry
+sentry_sdk.init(
+    dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
+    traces_sample_rate=1.0,
+    profiles_sample_rate=1.0,
+)
 
 # Create Flask app with scrapers enabled
 app = create_app(run_migrations=False)
diff --git a/app_factory.py b/app_factory.py
index d4752ec..d70f6da 100644
--- a/app_factory.py
+++ b/app_factory.py
@@ -9,7 +9,6 @@
 from src.schema import Query, Mutation
 from flasgger import Swagger
 from flask_graphql import GraphQLView
-import sentry_sdk
 
 # Set up logging at module level
 logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s",
@@ -29,14 +28,6 @@ def create_app(run_migrations=False):
     """
     logger.info("Initializing application")
 
-    # Initialize Sentry
-    logger.info("Configuring Sentry")
-    sentry_sdk.init(
-        dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
-        traces_sample_rate=1.0,
-        profiles_sample_rate=1.0,
-    )
-
     # Create and configure Flask app
     app = Flask(__name__)
     app.debug = True

From bae6d1a85a35f710c78275b7941ad44c70893988 Mon Sep 17 00:00:00 2001
From: Kevin Biliguun
Date: Wed, 26 Feb 2025 17:50:49 -0500
Subject: [PATCH 35/53] Changed things mentioned in comments, updating naming
 for consistency

---
 schema.graphql     | 17 ++++++++++++++---
 src/models/user.py |  4 ++--
 src/schema.py      | 20 ++++++++++----------
 src/utils/utils.py | 16 ++++++++--------
 4 files changed, 34 insertions(+), 23 deletions(-)

diff --git a/schema.graphql b/schema.graphql
index fde9d18..351bfc0 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -71,6 +71,16 @@ type CreateReport {
 
 scalar DateTime
 
+enum DayOfWeekEnum {
+  MONDAY
+  TUESDAY
+  WEDNESDAY
+  THURSDAY
+  FRIDAY
+  SATURDAY
+  SUNDAY
+}
+
 enum DayOfWeekGraphQLEnum {
   MONDAY
   TUESDAY
@@ -137,7 +147,6 @@ type Gym {
   reports: [Report]
 }
 
-scalar JSONString
 type HourlyAverageCapacity {
   id: ID!
   facilityId: Int!
@@ -147,6 +156,8 @@ type HourlyAverageCapacity {
   history: [Float]!
 }
 
+scalar JSONString
+
 enum MuscleGroup {
   ABDOMINALS
   CHEST
@@ -232,10 +243,10 @@ type User {
   id: ID!
   email: String
   netId: String!
   name: String!
-  workoutGoal: [DayOfWeekGraphQLEnum]
-  giveaways: [Giveaway]
   activeStreak: Int
   maxStreak: Int
+  workoutGoal: [DayOfWeekEnum]
+  giveaways: [Giveaway]
 }
 
 type Workout {
diff --git a/src/models/user.py b/src/models/user.py
index 6f35259..5a99ee2 100644
--- a/src/models/user.py
+++ b/src/models/user.py
@@ -14,7 +14,7 @@ class User(Base):
    - `net_id` The user's Net ID.
    - `name` The user's name.
    - `workout_goal` The days of the week the user has set as their personal goal.
-    - `active_streak` The number of weeks the user has met their personal goal.
-    - `max_streak` The max number of weeks the user has met their personal goal.
+    - `active_streak` The number of consecutive weeks the user has met their personal goal.
+    - `max_streak` The maximum number of consecutive weeks the user has met their personal goal.
     """
 
diff --git a/src/schema.py b/src/schema.py
index f6a65aa..ca0c201 100644
--- a/src/schema.py
+++ b/src/schema.py
@@ -186,9 +186,9 @@ class User(SQLAlchemyObjectType):
     class Meta:
         model = UserModel
 
-    current_streak = graphene.Int(description="The user's current workout streak in days.")
-    max_streak = graphene.Int(description="The user's maximum workout streak.")
-    workout_goal = graphene.List(DayOfWeekGraphQLEnum)
+    # current_streak = graphene.Int(description="The user's current workout streak in days.")
+    # max_streak = graphene.Int(description="The user's maximum workout streak.")
+    # workout_goal = graphene.List(DayOfWeekGraphQLEnum)
 
 
 class UserInput(graphene.InputObjectType):
@@ -244,8 +244,8 @@ class Query(graphene.ObjectType):
     get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.")
     activities = graphene.List(Activity)
     get_all_reports = graphene.List(Report, description="Get all reports.")
-    get_workout_goals = graphene.List(graphene.String, user_id=graphene.Int(required=True), description="Get the workout goals of a user by ID.")
-    get_user_streak = graphene.Field(graphene.JSONString, user_id=graphene.Int(required=True), description="Get the current and max workout streak of a user.")
+    get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), description="Get the workout goals of a user by ID.")
+    get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int(required=True), description="Get the current and max workout streak of a user.")
     get_hourly_average_capacities_by_facility_id = graphene.List(
         HourlyAverageCapacity, facility_id=graphene.Int(), description="Get all facility hourly average capacities."
     )
@@ -322,13 +322,13 @@ def resolve_get_user_streak(self, info, id):
         )
 
         if not workouts:
-            return {"current_streak": 0, "max_streak": 0}
+            return {"active_streak": 0, "max_streak": 0}
 
         workout_dates = {workout.workout_time.date() for workout in workouts}
         sorted_dates = sorted(workout_dates, reverse=True)
 
         today = datetime.utcnow().date()
-        current_streak = 0
+        active_streak = 0
         max_streak = 0
         streak = 0
         prev_date = None
@@ -341,12 +341,12 @@ def resolve_get_user_streak(self, info, id):
             streak += 1
             prev_date = date
 
-            if date == today or (date == today - timedelta(days=1) and current_streak == 0):
-                current_streak = streak
+            if date == today or (date == today - timedelta(days=1) and active_streak == 0):
+                active_streak = streak
 
         max_streak = max(max_streak, streak)
 
-        return {"current_streak": current_streak, "max_streak": max_streak}
+        return {"active_streak": active_streak, "max_streak": max_streak}
 
     def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id):
         valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681]
diff --git a/src/utils/utils.py b/src/utils/utils.py
index e46ab6e..2233eab 100644
--- a/src/utils/utils.py
+++ b/src/utils/utils.py
@@ -152,10 +152,10 @@ def calculate_streaks(user, workouts, workout_goal):
     - `workout_goal` A list of goal days (e.g., ['Monday', 'Wednesday']).
 
     Returns:
-    - Updates `user.current_streak` and `user.max_streak`.
+    - Updates `user.active_streak` and `user.max_streak`.
""" if not workouts: - user.current_streak = 0 + user.active_streak = 0 user.max_streak = user.max_streak or 0 return @@ -168,7 +168,7 @@ def calculate_streaks(user, workouts, workout_goal): # Sort by workout date valid_workouts.sort(key=lambda x: x.workout_time) - current_streak = 1 + active_streak = 1 max_streak = user.max_streak or 0 for i in range(1, len(valid_workouts)): @@ -182,13 +182,13 @@ def calculate_streaks(user, workouts, workout_goal): # Check if current workout is on the expected next goal day if curr_day.date() == expected_next_day.date(): - current_streak += 1 + active_streak += 1 else: - max_streak = max(max_streak, current_streak) - current_streak = 1 + max_streak = max(max_streak, active_streak) + active_streak = 1 # Final update - max_streak = max(max_streak, current_streak) - user.current_streak = current_streak + max_streak = max(max_streak, active_streak) + user.active_streak = active_streak user.max_streak = max_streak From 3d9ed74276e220b37774e03849d7adbb37b15024 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Wed, 26 Feb 2025 17:58:22 -0500 Subject: [PATCH 36/53] Removed gunicorn and only run sentry on dev/prod --- app.py | 15 ++++++++------- requirements.txt | 1 - 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/app.py b/app.py index 0dd4595..899e6f0 100644 --- a/app.py +++ b/app.py @@ -1,13 +1,14 @@ from app_factory import create_app import sentry_sdk +import os - -# Initialize Sentry -sentry_sdk.init( - dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560", - traces_sample_rate=1.0, - profiles_sample_rate=1.0, -) +# Initialize Sentry only if not in local +if os.environ.get('FLASK_ENV') in ["development", "production"]: + sentry_sdk.init( + dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560", + traces_sample_rate=1.0, + profiles_sample_rate=1.0, + ) # Create Flask app with scrapers enabled app = create_app(run_migrations=False) diff --git a/requirements.txt b/requirements.txt index 87f0460..ef52401 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,6 @@ graphql-relay==0.4.5 graphql-server-core==1.1.1 greenlet==2.0.2 gspread==5.12.3 -gunicorn==19.9.0 identify==2.5.24 idna==2.6 importlib-metadata==6.7.0 From fd57abb1e28755ae77fb0e6ec15c87ae549a2f14 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Thu, 27 Feb 2025 20:10:34 -0500 Subject: [PATCH 37/53] Added versions directory for migrations --- migrations/versions/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 migrations/versions/.gitkeep diff --git a/migrations/versions/.gitkeep b/migrations/versions/.gitkeep new file mode 100644 index 0000000..e69de29 From b42f589e57acee83df471d949ecd11c74fe8805e Mon Sep 17 00:00:00 2001 From: Aayush Agnihotri <68517064+Aayush-Agnihotri@users.noreply.github.com> Date: Fri, 28 Feb 2025 11:22:36 -0500 Subject: [PATCH 38/53] Update app.py --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 899e6f0..70a7c30 100644 --- a/app.py +++ b/app.py @@ -14,4 +14,4 @@ app = create_app(run_migrations=False) if __name__ == "__main__": - app.run(host="127.0.0.1", port=5000) + app.run(host="0.0.0.0", port=5000) From a5d587451410fb39bea1f5953d8ed66aa048667b Mon Sep 17 00:00:00 2001 From: Aayush Agnihotri <68517064+Aayush-Agnihotri@users.noreply.github.com> Date: Fri, 28 Feb 2025 11:23:31 -0500 Subject: [PATCH 39/53] Delete .github/workflows/test.yml --- .github/workflows/test.yml | 54 
-------------------------------------- 1 file changed, 54 deletions(-) delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 8920685..0000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: test - -on: - push: - branches: - - master - - release/* - pull_request: - branches: - - master - - release/* - -jobs: - build: - runs-on: ubuntu-latest - - services: - postgres: - image: postgres:latest - env: - POSTGRES_DB: uplift - POSTGRES_PASSWORD: password - POSTGRES_USER: local - ports: - - 5432:5432 - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v1 - with: - python-version: 3.9 - - name: install and test - run: | - sudo apt-get update - sudo apt-get install libxml2-dev libxslt-dev - sudo apt-get install --yes --no-install-recommends postgresql-client - pip install --force-reinstall pip==20.0.2 - pip install --force-reinstall setuptools==44.0.0 - pip freeze - pip install -r requirements.txt - python -m unittest src.tests.test_scraper - env: - DB_HOST: localhost - DB_NAME: uplift - DB_PORT: 5432 - DB_PASSWORD: password - DB_USERNAME: local - FLASK_ENV: dev - GOOGLE_SERVICE_ACCOUNT_PATH: service-account-key.json From 408b4b5cc99ced5b48f4e8d96be4676dd202be6b Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Fri, 28 Feb 2025 21:13:40 -0500 Subject: [PATCH 40/53] update migrations --- migrations/alembic.ini | 1 + migrations/env.py | 15 +++-- .../versions/31b1fa20772f_popular_times.py | 56 +++++++++++++++++++ 3 files changed, 66 insertions(+), 6 deletions(-) create mode 100644 migrations/versions/31b1fa20772f_popular_times.py diff --git a/migrations/alembic.ini b/migrations/alembic.ini index f8ed480..d676023 100644 --- a/migrations/alembic.ini +++ b/migrations/alembic.ini @@ -1,6 +1,7 @@ # A generic, single database configuration. 
[alembic] +script_location = migrations # template used to generate migration files # file_template = %%(rev)s_%%(slug)s diff --git a/migrations/env.py b/migrations/env.py index 169d487..e4f27ce 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,4 +1,8 @@ from __future__ import with_statement +import sys +import os +sys.path.append(os.path.abspath(os.path.dirname(__file__) + "/..")) +from app import app import logging from logging.config import fileConfig @@ -21,10 +25,9 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from flask import current_app -config.set_main_option('sqlalchemy.url', - current_app.config.get('SQLALCHEMY_DATABASE_URI')) -target_metadata = current_app.extensions['migrate'].db.metadata +with app.app_context(): + config.set_main_option('sqlalchemy.url', app.config['SQLALCHEMY_DATABASE_URI']) + target_metadata = app.extensions['migrate'].db.metadata # other values from the config, defined by the needs of env.py, # can be acquired: @@ -82,7 +85,7 @@ def process_revision_directives(context, revision, directives): connection=connection, target_metadata=target_metadata, process_revision_directives=process_revision_directives, - **current_app.extensions['migrate'].configure_args + **app.extensions['migrate'].configure_args ) with context.begin_transaction(): @@ -92,4 +95,4 @@ def process_revision_directives(context, revision, directives): if context.is_offline_mode(): run_migrations_offline() else: - run_migrations_online() + run_migrations_online() \ No newline at end of file diff --git a/migrations/versions/31b1fa20772f_popular_times.py b/migrations/versions/31b1fa20772f_popular_times.py new file mode 100644 index 0000000..6f50ab8 --- /dev/null +++ b/migrations/versions/31b1fa20772f_popular_times.py @@ -0,0 +1,56 @@ +"""popular times + +Revision ID: 31b1fa20772f +Revises: +Create Date: 2025-02-28 20:57:19.922403 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. +revision = '31b1fa20772f' +down_revision = None +branch_labels = None +depends_on = None + + +### Ensures alembic does not try to create enum +day_of_week_enum = postgresql.ENUM( + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday', + name='dayofweekenum', create_type=False +) + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('gear', 'cost', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=0) + + ### Alembic is running command multiple times. + op.execute(""" + DO $$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'hourly_average_capacity') THEN + CREATE TABLE hourly_average_capacity ( + id SERIAL PRIMARY KEY, + facility_id INTEGER NOT NULL REFERENCES facility(id), + average_percent FLOAT NOT NULL, + hour_of_day INTEGER NOT NULL, + day_of_week dayofweekenum, + history NUMERIC[] DEFAULT '{}' NOT NULL + ); + END IF; + END $$; + """) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + op.drop_table('hourly_average_capacity') + # ### end Alembic commands ### + From 7682b4ba859ed1e448c0b90daa9af083a5fe0896 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Sun, 2 Mar 2025 14:11:32 -0500 Subject: [PATCH 41/53] Added delete user by id mutation --- schema.graphql | 1 + src/schema.py | 21 ++++++++++++++++++--- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/schema.graphql b/schema.graphql index 3bf7812..613b62a 100644 --- a/schema.graphql +++ b/schema.graphql @@ -168,6 +168,7 @@ type Mutation { setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(userId: Int!, workoutTime: DateTime!): Workout createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport + deleteUser(userId: Int!): User } type OpenHours { diff --git a/src/schema.py b/src/schema.py index 1f3b4e8..11f6869 100644 --- a/src/schema.py +++ b/src/schema.py @@ -116,7 +116,7 @@ class Meta: model = CapacityModel -#MARK - Hourly Average Capacity +# MARK - Hourly Average Capacity class HourlyAverageCapacity(SQLAlchemyObjectType): class Meta: model = HourlyAverageCapacityModel @@ -252,7 +252,7 @@ def resolve_get_all_gyms(self, info): def resolve_activities(self, info): query = Activity.get_query(info) return query.all() - + def resolve_get_user_by_net_id(self, info, net_id): user = User.get_query(info).filter(UserModel.net_id == net_id).all() if not user: @@ -296,7 +296,7 @@ def resolve_get_weekly_workout_days(self, info, id): def resolve_get_all_reports(self, info): query = ReportModel.query.all() return query - + def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id): valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681] if facility_id not in valid_facility_ids: @@ -451,6 +451,20 @@ def mutate(self, info, description, issue, created_at, gym_id): db_session.commit() return CreateReport(report=report) +class DeleteUserById(graphene.Mutation): + class Arguments: + user_id = graphene.Int(required=True) + Output = User + + def mutate(self, info, user_id): + # Check if user exists + user = User.get_query(info).filter(UserModel.id == user_id).first() + if not user: + raise GraphQLError("User with given ID does not exist.") + db_session.delete(user) + db_session.commit() + return user + class Mutation(graphene.ObjectType): create_giveaway = CreateGiveaway.Field(description="Creates a new giveaway.") @@ -459,6 +473,7 @@ class Mutation(graphene.ObjectType): set_workout_goals = SetWorkoutGoals.Field(description="Set a user's workout goals.") log_workout = logWorkout.Field(description="Log a user's workout.") create_report = CreateReport.Field(description="Creates a new report.") + delete_user = DeleteUserById.Field(description="Deletes a user by ID.") schema = graphene.Schema(query=Query, mutation=Mutation) From 65308193bb17ac45950098ec7df82126a18c6db7 Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Sun, 2 Mar 2025 19:20:01 -0500 Subject: [PATCH 42/53] popular times fix --- app_factory.py | 56 ++++++++++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/app_factory.py b/app_factory.py index d70f6da..451d5a6 100644 --- a/app_factory.py +++ b/app_factory.py @@ -1,4 +1,5 @@ import logging +from datetime import datetime from flask import Flask, render_template from graphene import Schema from graphql.utils import schema_printer @@ -11,11 +12,10 @@ from flask_graphql import GraphQLView # Set up logging at module level 
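One property of the module-level logging setup reformatted below: logging.basicConfig configures the root logger only if it has no handlers yet, so the first import of app_factory wins and any later call is silently ignored. A minimal illustration (levels illustrative, not from the patch):

    import logging

    logging.basicConfig(level=logging.INFO)   # first call: takes effect
    logging.basicConfig(level=logging.DEBUG)  # no-op: root already has a handler
    logging.getLogger(__name__).debug("suppressed; root level is still INFO")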
-logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", - level=logging.INFO, - datefmt="%Y-%m-%d %H:%M:%S") +logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S") logger = logging.getLogger(__name__) + def create_app(run_migrations=False): """ Application factory for Flask app. @@ -37,14 +37,13 @@ def create_app(run_migrations=False): if not all([db_user, db_password, db_name, db_host, db_port]): logger.error("Missing required database configuration variables") raise ValueError( - "Missing required database configuration. " - "Please ensure all database environment variables are set." + "Missing required database configuration. " "Please ensure all database environment variables are set." ) # Configure database logger.info("Configuring database connection to %s:%s/%s", db_host, db_port, db_name) - app.config['SQLALCHEMY_DATABASE_URI'] = db_url - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + app.config["SQLALCHEMY_DATABASE_URI"] = db_url + app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False # Set up extensions logger.info("Setting up Flask extensions") @@ -85,11 +84,12 @@ def shutdown_session(exception=None): logger.info("Application initialization complete") return app + def setup_scrapers(app): """Set up scrapers and scheduled tasks""" # Import scraper-related modules only when needed from flask_apscheduler import APScheduler - from src.scrapers.capacities_scraper import fetch_capacities + from src.scrapers.capacities_scraper import fetch_capacities, update_hourly_capacity from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility from src.scrapers.scraper_helpers import clean_past_hours from src.scrapers.sp_hours_scraper import fetch_sp_facility @@ -110,9 +110,9 @@ def setup_scrapers(app): # Scrape hours every 15 minutes @scheduler.task("interval", id="scrape_hours", seconds=900) def scrape_hours(): - job = scheduler.get_job('scrape_hours') - next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown" - logging.info("Running job \"scrape_hours (trigger: interval[0:15:00], next run at: %s EST)\"", next_run) + job = scheduler.get_job("scrape_hours") + next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown" + logging.info('Running job "scrape_hours (trigger: interval[0:15:00], next run at: %s EST)"', next_run) try: logging.info("Scraping hours from sheets...") # Clear hours @@ -122,38 +122,54 @@ def scrape_hours(): fetch_sp_facility() clean_past_hours() logging.info( - "Job \"scrape_hours (trigger: interval[0:15:00], next run at: %s EST)\" executed successfully", next_run) + 'Job "scrape_hours (trigger: interval[0:15:00], next run at: %s EST)" executed successfully', next_run + ) except Exception as e: logging.error(f"Error in scrape_hours: {e}") # Scrape capacities every 10 minutes @scheduler.task("interval", id="scrape_capacities", seconds=600) def scrape_capacities(): - job = scheduler.get_job('scrape_capacities') - next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown" - logging.info("Running job \"scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)\"", next_run) + job = scheduler.get_job("scrape_capacities") + next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown" + logging.info('Running job "scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)"', next_run) 
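        # The fetch below is wrapped in try/except so a single failed C2C
        # request logs an error instead of killing the APScheduler worker;
        # the interval trigger simply retries on the next run.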
try: logging.info("Scraping capacities from C2C...") fetch_capacities() logging.info( - "Job \"scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)\" executed successfully", next_run) + 'Job "scrape_capacities (trigger: interval[0:10:00], next run at: %s EST)" executed successfully', + next_run, + ) except Exception as e: logging.error(f"Error in scrape_capacities: {e}") # Scrape classes every hour @scheduler.task("interval", id="scrape_classes", seconds=3600) def scrape_classes(): - job = scheduler.get_job('scrape_classes') - next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if job and job.next_run_time else "Unknown" - logging.info("Running job \"scrape_classes (trigger: interval[1:00:00], next run at: %s EST)\"", next_run) + job = scheduler.get_job("scrape_classes") + next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") if job and job.next_run_time else "Unknown" + logging.info('Running job "scrape_classes (trigger: interval[1:00:00], next run at: %s EST)"', next_run) try: logging.info("Scraping classes from group-fitness-classes...") fetch_classes(10) logging.info( - "Job \"scrape_classes (trigger: interval[1:00:00], next run at: %s EST)\" executed successfully", next_run) + 'Job "scrape_classes (trigger: interval[1:00:00], next run at: %s EST)" executed successfully', next_run + ) except Exception as e: logging.error(f"Error in scrape_classes: {e}") + # Update hourly average capacity every hour + @scheduler.task("cron", id="update_capacity", hour="*") + def scheduled_job(): + current_time = datetime.now() + current_day = current_time.strftime("%A").upper() + current_hour = current_time.hour + try: + logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...") + update_hourly_capacity(current_day, current_hour) + except Exception as e: + logging.error(f"Error updating hourly average capacity for {current_day}, hour {current_hour}: {e}") + # We're now handling job execution logging within each task function # Initialize scheduler From 2a1820f32650a6e54dc612515a8f812ce6fa4315 Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Tue, 4 Mar 2025 22:46:30 -0500 Subject: [PATCH 43/53] Added migration file --- ..._added_active_streak_and_max_streak_to_.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py diff --git a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py new file mode 100644 index 0000000..17c54c6 --- /dev/null +++ b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py @@ -0,0 +1,34 @@ +"""Added active_streak and max_streak to users + +Revision ID: 6b01a81bb92b +Revises: 31b1fa20772f +Create Date: 2025-03-04 22:45:06.601964 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6b01a81bb92b' +down_revision = '31b1fa20772f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('report_user_id_fkey', 'report', type_='foreignkey') + op.drop_column('report', 'user_id') + op.add_column('users', sa.Column('active_streak', sa.Integer(), nullable=True)) + op.add_column('users', sa.Column('max_streak', sa.Integer(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('report', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) + op.create_foreign_key('report_user_id_fkey', 'report', 'users', ['user_id'], ['id']) + op.drop_column('users', 'active_streak') + op.drop_column('users', 'max_streak') + # ### end Alembic commands ### From e0fe8b9ab193f5e09173c032ff616d369f999c9d Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Wed, 5 Mar 2025 18:14:07 -0500 Subject: [PATCH 44/53] Remove report lines from migrations file --- .../6b01a81bb92b_added_active_streak_and_max_streak_to_.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py index 17c54c6..6f21ec9 100644 --- a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py +++ b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py @@ -20,8 +20,6 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_constraint('report_user_id_fkey', 'report', type_='foreignkey') op.drop_column('report', 'user_id') - op.add_column('users', sa.Column('active_streak', sa.Integer(), nullable=True)) - op.add_column('users', sa.Column('max_streak', sa.Integer(), nullable=True)) # ### end Alembic commands ### @@ -29,6 +27,4 @@ def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('report', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) op.create_foreign_key('report_user_id_fkey', 'report', 'users', ['user_id'], ['id']) - op.drop_column('users', 'active_streak') - op.drop_column('users', 'max_streak') # ### end Alembic commands ### From 9ded3d700e57649064e7e059fc34b8dccfeafffe Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Wed, 5 Mar 2025 18:16:30 -0500 Subject: [PATCH 45/53] Remove report lines from migrations file --- ...6b01a81bb92b_added_active_streak_and_max_streak_to_.py | 8 ++++---- src/database.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py index 6f21ec9..81d9121 100644 --- a/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py +++ b/migrations/versions/6b01a81bb92b_added_active_streak_and_max_streak_to_.py @@ -18,13 +18,13 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint('report_user_id_fkey', 'report', type_='foreignkey') - op.drop_column('report', 'user_id') + op.add_column('users', sa.Column('active_streak', sa.Integer(), nullable=True)) + op.add_column('users', sa.Column('max_streak', sa.Integer(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('report', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) - op.create_foreign_key('report_user_id_fkey', 'report', 'users', ['user_id'], ['id']) + op.drop_column('users', 'active_streak') + op.drop_column('users', 'max_streak') # ### end Alembic commands ### diff --git a/src/database.py b/src/database.py index 951ff8c..fe0a131 100644 --- a/src/database.py +++ b/src/database.py @@ -3,8 +3,8 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -# import dotenv -# dotenv.load_dotenv() +import dotenv +dotenv.load_dotenv() # Get database credentials with logging db_user = os.getenv("DB_USERNAME") From 42b0d6f69f94b48d1451f9523c5e5e600b9feff9 Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Wed, 5 Mar 2025 18:18:05 -0500 Subject: [PATCH 46/53] Recommented lines --- src/database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/database.py b/src/database.py index fe0a131..951ff8c 100644 --- a/src/database.py +++ b/src/database.py @@ -3,8 +3,8 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -import dotenv -dotenv.load_dotenv() +# import dotenv +# dotenv.load_dotenv() # Get database credentials with logging db_user = os.getenv("DB_USERNAME") From 0d3569fa5341cbc2944479f0de3d4575e379ee9d Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Thu, 6 Mar 2025 21:13:46 -0500 Subject: [PATCH 47/53] initial commit --- app_factory.py | 23 +++++++++ dump.rdb | Bin 0 -> 203 bytes requirements.txt | 3 +- schema.graphql | 16 ++++++ src/models/token_blacklist.py | 11 ++++ src/schema.py | 93 ++++++++++++++++++++++++++++++++-- src/utils/constants.py | 3 ++ 7 files changed, 144 insertions(+), 5 deletions(-) create mode 100644 dump.rdb create mode 100644 src/models/token_blacklist.py diff --git a/app_factory.py b/app_factory.py index 451d5a6..9754b75 100644 --- a/app_factory.py +++ b/app_factory.py @@ -1,8 +1,11 @@ import logging +from datetime import timedelta, timezone +from flask_jwt_extended import JWTManager from datetime import datetime from flask import Flask, render_template from graphene import Schema from graphql.utils import schema_printer +from src.utils.constants import JWT_SECRET_KEY from src.database import db_session, init_db from src.database import Base as db from src.database import db_url, db_user, db_password, db_name, db_host, db_port @@ -10,6 +13,8 @@ from src.schema import Query, Mutation from flasgger import Swagger from flask_graphql import GraphQLView +from src.models.token_blacklist import TokenBlocklist + # Set up logging at module level logging.basicConfig(format="%(asctime)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S") @@ -51,6 +56,17 @@ def create_app(run_migrations=False): schema = Schema(query=Query, mutation=Mutation) swagger = Swagger(app) + app.config["JWT_SECRET_KEY"] = JWT_SECRET_KEY + app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(hours=1) + app.config["JWT_REFRESH_TOKEN_EXPIRES"] = timedelta(days=30) + + jwt = JWTManager(app) + + @jwt.token_in_blocklist_loader + def check_if_token_revoked(jwt_header, jwt_payload: dict) -> bool: + jti = jwt_payload["jti"] + return db_session.query(TokenBlocklist.id).filter_by(jti=jti).scalar() is not None + # Configure routes logger.info("Configuring routes") @@ -158,6 +174,13 @@ def scrape_classes(): except Exception as e: 
logging.error(f"Error in scrape_classes: {e}") + @scheduler.task("interval", id="cleanup_expired_tokens", hours=24) + def cleanup_expired_tokens(): + logger.info("Deleting expired tokens...") + now = datetime.now(timezone.utc) + db_session.query(TokenBlocklist).filter(TokenBlocklist.expires_at < now).delete() + db_session.commit() + # Update hourly average capacity every hour @scheduler.task("cron", id="update_capacity", hour="*") def scheduled_job(): diff --git a/dump.rdb b/dump.rdb new file mode 100644 index 0000000000000000000000000000000000000000..8ad52aedda30e49203b7ff023b26c4e10aa95140 GIT binary patch literal 203 zcmWG?b@2=~FfcUy#aWb^l3A=H&s-48NI}{_NQ4_;V^F0|SFfYI35vSyHNz zu3?I0vaU(0rJ1f}vWbDNfw^Ilfq}7+MY4q@dr@jxes*fgpCb?4pxO)!l9CLJQ;c*i pOcITCO@In@6U~wgbd!=zl8sF)Q&N+Y%wgL8``%q%+39fU8UR!VNPGYQ literal 0 HcmV?d00001 diff --git a/requirements.txt b/requirements.txt index ef52401..3d4cc15 100644 --- a/requirements.txt +++ b/requirements.txt @@ -78,4 +78,5 @@ wasmer-compiler-cranelift==1.1.0 wcwidth==0.2.6 Werkzeug==2.2.2 zipp==3.15.0 -sentry-sdk==2.13.0 \ No newline at end of file +sentry-sdk==2.13.0 +flask_jwt_extended==4.7.1 \ No newline at end of file diff --git a/schema.graphql b/schema.graphql index 613b62a..9a89559 100644 --- a/schema.graphql +++ b/schema.graphql @@ -146,6 +146,15 @@ type HourlyAverageCapacity { history: [Float]! } +type LoginUser { + accessToken: String + refreshToken: String +} + +type LogoutUser { + success: Boolean +} + enum MuscleGroup { ABDOMINALS CHEST @@ -167,6 +176,9 @@ type Mutation { enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(userId: Int!, workoutTime: DateTime!): Workout + loginUser(netId: String!): LoginUser + logoutUser: LogoutUser + refreshAccessToken: RefreshAccessToken createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport deleteUser(userId: Int!): User } @@ -208,6 +220,10 @@ type Query { getHourlyAverageCapacitiesByFacilityId(facilityId: Int): [HourlyAverageCapacity] } +type RefreshAccessToken { + newAccessToken: String +} + type Report { id: ID! createdAt: DateTime! 
diff --git a/src/models/token_blacklist.py b/src/models/token_blacklist.py new file mode 100644 index 0000000..ad2d490 --- /dev/null +++ b/src/models/token_blacklist.py @@ -0,0 +1,11 @@ +from sqlalchemy import Column, Float, String, Integer, DateTime +from sqlalchemy.orm import relationship +from src.database import Base + + +class TokenBlocklist(Base): + __tablename__ = "token_blacklist" + + id = Column(Integer, primary_key=True) + jti = Column(String(36), index=True, nullable=False) + expires_at = Column(DateTime, nullable=False) diff --git a/src/schema.py b/src/schema.py index 11f6869..d4a592e 100644 --- a/src/schema.py +++ b/src/schema.py @@ -1,5 +1,15 @@ import graphene -from datetime import datetime, timedelta +import os +from flask_jwt_extended import ( + create_access_token, + create_refresh_token, + verify_jwt_in_request, + get_jwt_identity, + get_jwt, + jwt_required, +) +from functools import wraps +from datetime import datetime, timedelta, timezone from graphene_sqlalchemy import SQLAlchemyObjectType from graphql import GraphQLError from src.models.capacity import Capacity as CapacityModel @@ -11,6 +21,7 @@ from src.models.activity import Activity as ActivityModel, Price as PriceModel from src.models.classes import Class as ClassModel from src.models.classes import ClassInstance as ClassInstanceModel +from src.models.token_blacklist import TokenBlocklist from src.models.user import User as UserModel from src.models.enums import DayOfWeekGraphQLEnum from src.models.giveaway import Giveaway as GiveawayModel @@ -19,6 +30,7 @@ from src.models.report import Report as ReportModel from src.models.hourly_average_capacity import HourlyAverageCapacity as HourlyAverageCapacityModel from src.database import db_session +from flask import current_app # MARK: - Gym @@ -185,6 +197,7 @@ def resolve_pricing(self, info): class User(SQLAlchemyObjectType): class Meta: model = UserModel + workout_goal = graphene.List(DayOfWeekGraphQLEnum) @@ -216,8 +229,10 @@ class Workout(SQLAlchemyObjectType): class Meta: model = WorkoutModel + # MARK: - Report + class Report(SQLAlchemyObjectType): class Meta: model = ReportModel @@ -228,6 +243,7 @@ def resolve_gym(self, info): query = Gym.get_query(info).filter(GymModel.id == self.gym_id).first() return query + # MARK: - Query @@ -264,6 +280,7 @@ def resolve_get_users_by_giveaway_id(self, info, id): users = [User.get_query(info).filter(UserModel.id == entry.user_id).first() for entry in entries] return users + @jwt_required() def resolve_get_workouts_by_id(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -271,6 +288,7 @@ def resolve_get_workouts_by_id(self, info, id): workouts = Workout.get_query(info).filter(WorkoutModel.user_id == user.id).all() return workouts + @jwt_required() def resolve_get_weekly_workout_days(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -308,6 +326,59 @@ def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id # MARK: - Mutation +class LoginUser(graphene.Mutation): + class Arguments: + net_id = graphene.String(required=True) + + access_token = graphene.String() + refresh_token = graphene.String() + + def mutate(self, info, net_id): + user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first() + if not user: + return GraphQLError("No user with those credentials. 
Please create an account and try again.") + + # Generate JWT token + access_token = create_access_token(identity=user.id) + refresh_token = create_refresh_token(identity=user.id) + + user.refresh_token = refresh_token + db_session.commit() + + return LoginUser(access_token=access_token, refresh_token=refresh_token) + + +class RefreshAccessToken(graphene.Mutation): + new_access_token = graphene.String() + + @jwt_required(refresh=True) + def mutate(self, info): + identity = get_jwt_identity() + + new_access_token = create_access_token(identity=identity) + return RefreshAccessToken(new_access_token=new_access_token) + + +# WHAT happens if a user tries to access this route if they are not logged in? +class LogoutUser(graphene.Mutation): + success = graphene.Boolean() + + @jwt_required(verify_type=False) # Allows both access and refresh tokens + def mutate(self, info): + token = get_jwt() + jti = token["jti"] # Unique identifier for the token + + # Get expiration time from JWT itself + expires_at = datetime.fromtimestamp(token["exp"], tz=timezone.utc) + + # Store in blocklist + token = TokenBlocklist(jti=jti, expires_at=expires_at) + db_session.add(token) + db_session.commit() + + return LogoutUser(success=True) + + class CreateUser(graphene.Mutation): class Arguments: name = graphene.String(required=True) @@ -336,6 +407,7 @@ class Arguments: Output = GiveawayInstance + @jwt_required() def mutate(self, info, user_net_id, giveaway_id): # Check if NetID and Giveaway ID exists user = User.get_query(info).filter(UserModel.net_id == user_net_id).first() @@ -389,6 +461,7 @@ class Arguments: Output = User + @jwt_required() def mutate(self, info, user_id, workout_goal): user = User.get_query(info).filter(UserModel.id == user_id).first() if not user: @@ -417,6 +490,7 @@ class Arguments: Output = Workout + @jwt_required() def mutate(self, info, workout_time, user_id): user = User.get_query(info).filter(UserModel.id == user_id).first() if not user: @@ -428,6 +502,7 @@ def mutate(self, info, workout_time, user_id): db_session.commit() return workout + class CreateReport(graphene.Mutation): class Arguments: issue = graphene.String(required=True) @@ -443,17 +518,24 @@ def mutate(self, info, description, issue, created_at, gym_id): if not gym: raise GraphQLError("Gym with given ID does not exist.") # Check if issue is a valid enumeration - if issue not in ["INACCURATE_EQUIPMENT", "INCORRECT_HOURS", "INACCURATE_DESCRIPTION", "WAIT_TIMES_NOT_UPDATED", "OTHER"]: + if issue not in [ + "INACCURATE_EQUIPMENT", + "INCORRECT_HOURS", + "INACCURATE_DESCRIPTION", + "WAIT_TIMES_NOT_UPDATED", + "OTHER", + ]: raise GraphQLError("Issue is not a valid enumeration.") - report = ReportModel(description=description, issue=issue, - created_at=created_at, gym_id=gym_id) + report = ReportModel(description=description, issue=issue, created_at=created_at, gym_id=gym_id) db_session.add(report) db_session.commit() return CreateReport(report=report) + class DeleteUserById(graphene.Mutation): class Arguments: user_id = graphene.Int(required=True) + Output = User def mutate(self, info, user_id): @@ -472,6 +554,9 @@ class Mutation(graphene.ObjectType): enter_giveaway = EnterGiveaway.Field(description="Enters a user into a giveaway.") set_workout_goals = SetWorkoutGoals.Field(description="Set a user's workout goals.") log_workout = logWorkout.Field(description="Log a user's workout.") + login_user = LoginUser.Field(description="Login a user.") + logout_user = LogoutUser.Field(description="Logs out a user.") + refresh_access_token = 
RefreshAccessToken.Field(description="Refreshes the access token.") create_report = CreateReport.Field(description="Creates a new report.") delete_user = DeleteUserById.Field(description="Deletes a user by ID.") diff --git a/src/utils/constants.py b/src/utils/constants.py index 50a2c08..3d0ed63 100644 --- a/src/utils/constants.py +++ b/src/utils/constants.py @@ -53,6 +53,9 @@ # The path for Helen Newman Fitness Center details HNH_DETAILS = "https://scl.cornell.edu/recreation/facility/helen-newman-fitness-center" +# JWT secret key +JWT_SECRET_KEY = os.environ["JWT_SECRET_KEY"] + # Marker in sheets for alternating between badminton and volleyball (HNH Fridays) MARKER_ALT = "(ALT)" From 1d86a92fb1d3d41e118d67ed6d6865573a05bc59 Mon Sep 17 00:00:00 2001 From: Joshua Dirga Date: Thu, 6 Mar 2025 21:44:23 -0500 Subject: [PATCH 48/53] Added facility id field to workout --- ...e_added_fitness_center_to_workout_model.py | 30 +++++++++++++++++++ schema.graphql | 3 +- src/models/workout.py | 2 ++ src/schema.py | 20 ++++++++----- 4 files changed, 47 insertions(+), 8 deletions(-) create mode 100644 migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py diff --git a/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py b/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py new file mode 100644 index 0000000..93b9235 --- /dev/null +++ b/migrations/versions/0fde4435424e_added_fitness_center_to_workout_model.py @@ -0,0 +1,30 @@ +"""Added fitness center to workout model + +Revision ID: 0fde4435424e +Revises: 6b01a81bb92b +Create Date: 2025-03-06 20:50:25.488572 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0fde4435424e' +down_revision = '6b01a81bb92b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workout', sa.Column('facility_id', sa.Integer(), nullable=False)) + op.create_foreign_key(None, 'workout', 'facility', ['facility_id'], ['id']) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint(None, 'workout', type_='foreignkey') + op.drop_column('workout', 'facility_id') + # ### end Alembic commands ### diff --git a/schema.graphql b/schema.graphql index 4c3ab60..219600a 100644 --- a/schema.graphql +++ b/schema.graphql @@ -178,7 +178,7 @@ type Mutation { createUser(email: String!, name: String!, netId: String!): User enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User - logWorkout(userId: Int!, workoutTime: DateTime!): Workout + logWorkout(facilityId: Int!, userId: Int!, workoutTime: DateTime!): Workout createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport deleteUser(userId: Int!): User } @@ -254,4 +254,5 @@ type Workout { id: ID! workoutTime: DateTime! userId: Int! + facilityId: Int! } diff --git a/src/models/workout.py b/src/models/workout.py index 43f82a9..946b6f8 100644 --- a/src/models/workout.py +++ b/src/models/workout.py @@ -11,6 +11,7 @@ class Workout(Base): - `id` The ID of user. - `workout_time` The date and time of the workout. - `user_id` The ID of the user who completed the workout. 
+ - `facility_id` The ID of the facility visited """ __tablename__ = "workout" @@ -18,3 +19,4 @@ class Workout(Base): id = Column(Integer, primary_key=True) workout_time = Column(DateTime(), nullable=False) # should this be nullable? user_id = Column(Integer, ForeignKey("users.id"), nullable=False) + facility_id = Column(Integer, ForeignKey("facility.id"), nullable=False) diff --git a/src/schema.py b/src/schema.py index 546304b..53cedd0 100644 --- a/src/schema.py +++ b/src/schema.py @@ -240,8 +240,10 @@ class Query(graphene.ObjectType): get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.") activities = graphene.List(Activity) get_all_reports = graphene.List(Report, description="Get all reports.") - get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), description="Get the workout goals of a user by ID.") - get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int(required=True), description="Get the current and max workout streak of a user.") + get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), + description="Get the workout goals of a user by ID.") + get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int( + required=True), description="Get the current and max workout streak of a user.") get_hourly_average_capacities_by_facility_id = graphene.List( HourlyAverageCapacity, facility_id=graphene.Int(), description="Get all facility hourly average capacities." ) @@ -297,14 +299,14 @@ def resolve_get_weekly_workout_days(self, info, id): def resolve_get_all_reports(self, info): query = ReportModel.query.all() return query - + def resolve_get_workout_goals(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: raise GraphQLError("User with the given ID does not exist.") return [day.value for day in user.workout_goal] if user.workout_goal else [] - + def resolve_get_user_streak(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -343,7 +345,7 @@ def resolve_get_user_streak(self, info, id): max_streak = max(max_streak, streak) return {"active_streak": active_streak, "max_streak": max_streak} - + def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id): valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681] if facility_id not in valid_facility_ids: @@ -460,15 +462,19 @@ class logWorkout(graphene.Mutation): class Arguments: workout_time = graphene.DateTime(required=True) user_id = graphene.Int(required=True) + facility_id = graphene.Int(required=True) Output = Workout - def mutate(self, info, workout_time, user_id): + def mutate(self, info, workout_time, user_id, facility_id): user = User.get_query(info).filter(UserModel.id == user_id).first() if not user: raise GraphQLError("User with given ID does not exist.") + facility = Facility.get_query(info).filter(FacilityModel.id == facility_id).first() + if not facility: + raise GraphQLError("Facility with given ID does not exist.") - workout = WorkoutModel(workout_time=workout_time, user_id=user.id) + workout = WorkoutModel(workout_time=workout_time, user_id=user.id, facility_id=facility.id) db_session.add(workout) db_session.commit() From 925367944a14326c26cf7905434bb273d94a7c3d Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Fri, 7 Mar 2025 14:15:40 -0500 Subject: [PATCH 49/53] add mutation --- app_factory.py | 1 + 
.../f02ca08b34d7_add_token_blacklist_table.py | 43 +++++++++++++++++++ schema.graphql | 16 ++----- src/schema.py | 7 ++- 4 files changed, 52 insertions(+), 15 deletions(-) create mode 100644 migrations/versions/f02ca08b34d7_add_token_blacklist_table.py diff --git a/app_factory.py b/app_factory.py index 9754b75..97c6a7b 100644 --- a/app_factory.py +++ b/app_factory.py @@ -62,6 +62,7 @@ def create_app(run_migrations=False): jwt = JWTManager(app) + @jwt.token_in_blocklist_loader def check_if_token_revoked(jwt_header, jwt_payload: dict) -> bool: jti = jwt_payload["jti"] diff --git a/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py b/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py new file mode 100644 index 0000000..9736aed --- /dev/null +++ b/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py @@ -0,0 +1,43 @@ +"""add token_blacklist table + +Revision ID: f02ca08b34d7 +Revises: 6b01a81bb92b +Create Date: 2025-03-06 21:55:54.289783 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'f02ca08b34d7' +down_revision = '6b01a81bb92b' +branch_labels = None +depends_on = None + + +def upgrade(): + # Create the token_blacklist table + op.execute(""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = 'token_blacklist' + ) THEN + CREATE TABLE token_blacklist ( + id SERIAL PRIMARY KEY, + jti VARCHAR(36) NOT NULL, + expires_at TIMESTAMP NOT NULL + ); + CREATE INDEX ix_token_blacklist_jti ON token_blacklist(jti); + END IF; + END $$; + """) + # Create an index on the jti column for faster lookups + + +def downgrade(): + # Drop the index and then the table + op.execute("DROP TABLE IF EXISTS token_blacklist;") diff --git a/schema.graphql b/schema.graphql index a9b2e67..57d7450 100644 --- a/schema.graphql +++ b/schema.graphql @@ -71,16 +71,6 @@ type CreateReport { scalar DateTime -enum DayOfWeekEnum { - MONDAY - TUESDAY - WEDNESDAY - THURSDAY - FRIDAY - SATURDAY - SUNDAY -} - enum DayOfWeekGraphQLEnum { MONDAY TUESDAY @@ -156,6 +146,8 @@ type HourlyAverageCapacity { history: [Float]! } +scalar JSONString + type LoginUser { accessToken: String refreshToken: String @@ -165,8 +157,6 @@ type LogoutUser { success: Boolean } -scalar JSONString - enum MuscleGroup { ABDOMINALS CHEST @@ -262,7 +252,7 @@ type User { name: String! activeStreak: Int maxStreak: Int - workoutGoal: [DayOfWeekEnum] + workoutGoal: [DayOfWeekGraphQLEnum] giveaways: [Giveaway] } diff --git a/src/schema.py b/src/schema.py index bf6f4fa..73bea3b 100644 --- a/src/schema.py +++ b/src/schema.py @@ -317,6 +317,7 @@ def resolve_get_all_reports(self, info): query = ReportModel.query.all() return query + @jwt_required() def resolve_get_workout_goals(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -324,6 +325,7 @@ def resolve_get_workout_goals(self, info, id): return [day.value for day in user.workout_goal] if user.workout_goal else [] + @jwt_required() def resolve_get_user_streak(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() if not user: @@ -370,6 +372,7 @@ def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id query = HourlyAverageCapacity.get_query(info).filter(HourlyAverageCapacityModel.facility_id == facility_id) return query.all() + # MARK: - Mutation @@ -386,8 +389,8 @@ def mutate(self, info, net_id): return GraphQLError("No user with those credentials. 
Please create an account and try again.") # Generate JWT token - access_token = create_access_token(identity=user.id) - refresh_token = create_refresh_token(identity=user.id) + access_token = create_access_token(identity=str(user.id)) + refresh_token = create_refresh_token(identity=str(user.id)) user.refresh_token = refresh_token db_session.commit() From 60d09896186d77847453387cea5e34cc75ac1e5f Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Fri, 7 Mar 2025 14:34:09 -0500 Subject: [PATCH 50/53] reformat --- app_factory.py | 1 - dump.rdb | Bin 203 -> 0 bytes src/models/token_blacklist.py | 12 ++++++++++-- src/schema.py | 27 ++++++++++++--------------- 4 files changed, 22 insertions(+), 18 deletions(-) delete mode 100644 dump.rdb diff --git a/app_factory.py b/app_factory.py index 97c6a7b..9754b75 100644 --- a/app_factory.py +++ b/app_factory.py @@ -62,7 +62,6 @@ def create_app(run_migrations=False): jwt = JWTManager(app) - @jwt.token_in_blocklist_loader def check_if_token_revoked(jwt_header, jwt_payload: dict) -> bool: jti = jwt_payload["jti"] diff --git a/dump.rdb b/dump.rdb deleted file mode 100644 index 8ad52aedda30e49203b7ff023b26c4e10aa95140..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 203 zcmWG?b@2=~FfcUy#aWb^l3A=H&s-48NI}{_NQ4_;V^F0|SFfYI35vSyHNz zu3?I0vaU(0rJ1f}vWbDNfw^Ilfq}7+MY4q@dr@jxes*fgpCb?4pxO)!l9CLJQ;c*i pOcITCO@In@6U~wgbd!=zl8sF)Q&N+Y%wgL8``%q%+39fU8UR!VNPGYQ diff --git a/src/models/token_blacklist.py b/src/models/token_blacklist.py index ad2d490..7bd1386 100644 --- a/src/models/token_blacklist.py +++ b/src/models/token_blacklist.py @@ -1,9 +1,17 @@ -from sqlalchemy import Column, Float, String, Integer, DateTime -from sqlalchemy.orm import relationship +from sqlalchemy import Column, String, Integer, DateTime from src.database import Base class TokenBlocklist(Base): + """ + Represents a JWT token that has been revoked (blacklisted). + + Attributes: + - `id` The primary key of the token record. + - `jti` The unique identifier (JWT ID) of the token. Indexed for fast lookup. + - `expires_at` The DateTime when the token expires. 
+ """ + __tablename__ = "token_blacklist" id = Column(Integer, primary_key=True) diff --git a/src/schema.py b/src/schema.py index 73bea3b..8d8a233 100644 --- a/src/schema.py +++ b/src/schema.py @@ -1,13 +1,6 @@ import graphene import os -from flask_jwt_extended import ( - create_access_token, - create_refresh_token, - verify_jwt_in_request, - get_jwt_identity, - get_jwt, - jwt_required, -) +from flask_jwt_extended import create_access_token, create_refresh_token, get_jwt_identity, get_jwt, jwt_required from functools import wraps from datetime import datetime, timedelta, timezone from graphene_sqlalchemy import SQLAlchemyObjectType @@ -30,7 +23,6 @@ from src.models.report import Report as ReportModel from src.models.hourly_average_capacity import HourlyAverageCapacity as HourlyAverageCapacityModel from src.database import db_session -from flask import current_app # MARK: - Gym @@ -257,8 +249,14 @@ class Query(graphene.ObjectType): get_workouts_by_id = graphene.List(Workout, id=graphene.Int(), description="Get all of a user's workouts by ID.") activities = graphene.List(Activity) get_all_reports = graphene.List(Report, description="Get all reports.") - get_workout_goals = graphene.List(graphene.String, id=graphene.Int(required=True), description="Get the workout goals of a user by ID.") - get_user_streak = graphene.Field(graphene.JSONString, id=graphene.Int(required=True), description="Get the current and max workout streak of a user.") + get_workout_goals = graphene.List( + graphene.String, id=graphene.Int(required=True), description="Get the workout goals of a user by ID." + ) + get_user_streak = graphene.Field( + graphene.JSONString, + id=graphene.Int(required=True), + description="Get the current and max workout streak of a user.", + ) get_hourly_average_capacities_by_facility_id = graphene.List( HourlyAverageCapacity, facility_id=graphene.Int(), description="Get all facility hourly average capacities." ) @@ -316,7 +314,7 @@ def resolve_get_weekly_workout_days(self, info, id): def resolve_get_all_reports(self, info): query = ReportModel.query.all() return query - + @jwt_required() def resolve_get_workout_goals(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() @@ -324,7 +322,7 @@ def resolve_get_workout_goals(self, info, id): raise GraphQLError("User with the given ID does not exist.") return [day.value for day in user.workout_goal] if user.workout_goal else [] - + @jwt_required() def resolve_get_user_streak(self, info, id): user = User.get_query(info).filter(UserModel.id == id).first() @@ -364,7 +362,7 @@ def resolve_get_user_streak(self, info, id): max_streak = max(max_streak, streak) return {"active_streak": active_streak, "max_streak": max_streak} - + def resolve_get_hourly_average_capacities_by_facility_id(self, info, facility_id): valid_facility_ids = [14492437, 8500985, 7169406, 10055021, 2323580, 16099753, 15446768, 12572681] if facility_id not in valid_facility_ids: @@ -409,7 +407,6 @@ def mutate(self, info): return RefreshAccessToken(new_access_token=new_access_token) -# WHAT happens if a user tries to access this route if they are not logged in? 
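To answer the question the removed comment raised: an unauthenticated call never reaches mutate(). @jwt_required(verify_type=False) raises NoAuthorizationError first, which is surfaced in the GraphQL errors list (flask_jwt_extended's default message is "Missing Authorization Header") instead of executing the resolver. A quick check, assuming a local endpoint:

    import requests

    r = requests.post("http://localhost:5000/graphql",
                      json={"query": "mutation { logoutUser { success } }"})
    print(r.json())  # expect an errors entry rather than a success payload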
class LogoutUser(graphene.Mutation): success = graphene.Boolean() From 75cf210eecad99311004e780ac8aaa6f391a2da0 Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Fri, 7 Mar 2025 15:31:19 -0500 Subject: [PATCH 51/53] adjust login mutation --- src/schema.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/schema.py b/src/schema.py index 8d8a233..b6deb9a 100644 --- a/src/schema.py +++ b/src/schema.py @@ -390,7 +390,6 @@ def mutate(self, info, net_id): access_token = create_access_token(identity=str(user.id)) refresh_token = create_refresh_token(identity=str(user.id)) - user.refresh_token = refresh_token db_session.commit() return LoginUser(access_token=access_token, refresh_token=refresh_token) From 3cdd61051d1b60c54e03e1652827d2c5f7f46170 Mon Sep 17 00:00:00 2001 From: Sophie Strausberg Date: Wed, 12 Mar 2025 17:48:38 -0400 Subject: [PATCH 52/53] update migration file --- .../7245f58bb00a_add_token_blacklist_table.py | 43 +++++++++++++++++++ .../f02ca08b34d7_add_token_blacklist_table.py | 43 ------------------- schema.graphql | 6 +-- 3 files changed, 44 insertions(+), 48 deletions(-) create mode 100644 migrations/versions/7245f58bb00a_add_token_blacklist_table.py delete mode 100644 migrations/versions/f02ca08b34d7_add_token_blacklist_table.py diff --git a/migrations/versions/7245f58bb00a_add_token_blacklist_table.py b/migrations/versions/7245f58bb00a_add_token_blacklist_table.py new file mode 100644 index 0000000..3607a6e --- /dev/null +++ b/migrations/versions/7245f58bb00a_add_token_blacklist_table.py @@ -0,0 +1,43 @@ +"""add token_blacklist table + +Revision ID: 7245f58bb00a +Revises: 0fde4435424e +Create Date: 2025-03-12 17:46:57.085233 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '7245f58bb00a' +down_revision = '0fde4435424e' +branch_labels = None +depends_on = None + + +def upgrade(): +# Create the token_blacklist table + op.execute(""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = 'token_blacklist' + ) THEN + CREATE TABLE token_blacklist ( + id SERIAL PRIMARY KEY, + jti VARCHAR(36) NOT NULL, + expires_at TIMESTAMP NOT NULL + ); + CREATE INDEX ix_token_blacklist_jti ON token_blacklist(jti); + END IF; + END $$; + """) +# Create an index on the jti column for faster lookups + + +def downgrade(): +# Drop the index and then the table + op.execute("DROP TABLE IF EXISTS token_blacklist;") diff --git a/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py b/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py deleted file mode 100644 index 9736aed..0000000 --- a/migrations/versions/f02ca08b34d7_add_token_blacklist_table.py +++ /dev/null @@ -1,43 +0,0 @@ -"""add token_blacklist table - -Revision ID: f02ca08b34d7 -Revises: 6b01a81bb92b -Create Date: 2025-03-06 21:55:54.289783 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'f02ca08b34d7' -down_revision = '6b01a81bb92b' -branch_labels = None -depends_on = None - - -def upgrade(): - # Create the token_blacklist table - op.execute(""" - DO $$ - BEGIN - IF NOT EXISTS ( - SELECT 1 - FROM information_schema.tables - WHERE table_name = 'token_blacklist' - ) THEN - CREATE TABLE token_blacklist ( - id SERIAL PRIMARY KEY, - jti VARCHAR(36) NOT NULL, - expires_at TIMESTAMP NOT NULL - ); - CREATE INDEX ix_token_blacklist_jti ON token_blacklist(jti); - END IF; - END $$; - """) - # Create an index on the jti column for faster lookups - - -def downgrade(): - # Drop the index and then the table - op.execute("DROP TABLE IF EXISTS token_blacklist;") diff --git a/schema.graphql b/schema.graphql index 8c689c6..bb22812 100644 --- a/schema.graphql +++ b/schema.graphql @@ -177,14 +177,10 @@ type Mutation { createUser(email: String!, name: String!, netId: String!): User enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User -<<<<<<< HEAD - logWorkout(userId: Int!, workoutTime: DateTime!): Workout + logWorkout(facilityId: Int!, userId: Int!, workoutTime: DateTime!): Workout loginUser(netId: String!): LoginUser logoutUser: LogoutUser refreshAccessToken: RefreshAccessToken -======= - logWorkout(facilityId: Int!, userId: Int!, workoutTime: DateTime!): Workout ->>>>>>> f9a58592618c0a8743aeeaadf2753e083905cb66 createReport(createdAt: DateTime!, description: String!, gymId: Int!, issue: String!): CreateReport deleteUser(userId: Int!): User } From db73b5b182aeef84fd38b65f8f40d67fc0d01d7a Mon Sep 17 00:00:00 2001 From: Kevin Biliguun Date: Sun, 16 Mar 2025 23:03:40 -0400 Subject: [PATCH 53/53] Added photo storing for users --- ...9ce06ff5_added_photo_url_column_to_user.py | 28 +++++++ schema.graphql | 4 +- src/models/user.py | 2 + src/schema.py | 75 ++++++++++++++++++- 4 files changed, 105 insertions(+), 4 deletions(-) create mode 100644 migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py diff --git a/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py b/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py new file mode 100644 index 0000000..6ce6601 --- /dev/null +++ b/migrations/versions/add99ce06ff5_added_photo_url_column_to_user.py @@ -0,0 +1,28 @@ +"""added encoded_image column to user + +Revision ID: add99ce06ff5 +Revises: 7245f58bb00a +Create Date: 2025-03-12 18:17:26.681109 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'add99ce06ff5' +down_revision = '7245f58bb00a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('users', sa.Column('encoded_image', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('users', 'encoded_image') + # ### end Alembic commands ### diff --git a/schema.graphql b/schema.graphql index bb22812..a4aeaea 100644 --- a/schema.graphql +++ b/schema.graphql @@ -174,7 +174,8 @@ enum MuscleGroup { type Mutation { createGiveaway(name: String!): Giveaway - createUser(email: String!, name: String!, netId: String!): User + createUser(email: String!, encodedImage: String, name: String!, netId: String!): User + editUser(email: String, encodedImage: String, name: String, netId: String!): User enterGiveaway(giveawayId: Int!, userNetId: String!): GiveawayInstance setWorkoutGoals(userId: Int!, workoutGoal: [String]!): User logWorkout(facilityId: Int!, userId: Int!, workoutTime: DateTime!): Workout @@ -253,6 +254,7 @@ type User { activeStreak: Int maxStreak: Int workoutGoal: [DayOfWeekGraphQLEnum] + encodedImage: String giveaways: [Giveaway] } diff --git a/src/models/user.py b/src/models/user.py index 5a99ee2..9e608a7 100644 --- a/src/models/user.py +++ b/src/models/user.py @@ -17,6 +17,7 @@ class User(Base): - `active_streak` The number of consecutive weeks the user has met their personal goal. - `max_streak` The maximum number of consecutive weeks the user has met their personal goal. - `workout_goal` The max number of weeks the user has met their personal goal. + - `encoded_image` The profile picture URL of the user. """ __tablename__ = "users" @@ -29,3 +30,4 @@ class User(Base): active_streak = Column(Integer, nullable=True) max_streak = Column(Integer, nullable=True) workout_goal = Column(ARRAY(Enum(DayOfWeekEnum)), nullable=True) + encoded_image = Column(String, nullable=True) \ No newline at end of file diff --git a/src/schema.py b/src/schema.py index 08aa027..e3d27bb 100644 --- a/src/schema.py +++ b/src/schema.py @@ -23,7 +23,9 @@ from src.models.report import Report as ReportModel from src.models.hourly_average_capacity import HourlyAverageCapacity as HourlyAverageCapacityModel from src.database import db_session - +import requests +import json +import os # MARK: - Gym @@ -427,21 +429,87 @@ class Arguments: name = graphene.String(required=True) net_id = graphene.String(required=True) email = graphene.String(required=True) + encoded_image = graphene.String(required=False) Output = User - def mutate(self, info, name, net_id, email): + def mutate(self, info, name, net_id, email, encoded_image=None): # Check if a user with the given NetID already exists existing_user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first() + final_photo_url = None if existing_user: raise GraphQLError("NetID already exists.") - new_user = UserModel(name=name, net_id=net_id, email=email) + if encoded_image: + upload_url = os.getenv("DIGITAL_OCEAN_URL") + payload = { + "bucket": os.getenv("BUCKET_NAME"), + "image": encoded_image # Base64-encoded image string + } + headers = {"Content-Type": "application/json"} + try: + response = requests.post(upload_url, json=payload, headers=headers) + response.raise_for_status() + json_response = response.json() + final_photo_url = json_response.get("data") + if not final_photo_url: + raise GraphQLError("No URL returned from upload service.") + except requests.exceptions.RequestException as e: + print(f"Request failed: {e}") + raise GraphQLError("Failed to upload photo.") + + new_user = UserModel(name=name, net_id=net_id, email=email, encoded_image=final_photo_url) db_session.add(new_user) db_session.commit() return new_user + +class EditUser(graphene.Mutation): + class Arguments: + name = graphene.String(required=False) + 
net_id = graphene.String(required=True)
+        email = graphene.String(required=False)
+        encoded_image = graphene.String(required=False)
+
+    Output = User
+
+    def mutate(self, info, net_id, name=None, email=None, encoded_image=None):
+        existing_user = db_session.query(UserModel).filter(UserModel.net_id == net_id).first()
+        if not existing_user:
+            raise GraphQLError("User with given NetID does not exist.")
+
+        if name is not None:
+            existing_user.name = name
+        if email is not None:
+            existing_user.email = email
+        if encoded_image is not None:
+            upload_url = os.getenv("DIGITAL_OCEAN_URL")  # Base URL for upload endpoint
+            if not upload_url:
+                raise GraphQLError("Upload URL not configured.")
+
+            payload = {
+                "bucket": os.getenv("BUCKET_NAME", "DEV_BUCKET"),
+                "image": encoded_image  # Base64-encoded image string
+            }
+            headers = {"Content-Type": "application/json"}
+
+            # Deliberately not logging the payload itself: it holds the full base64 image.
+            print("Uploading profile image to upload service")
+
+            try:
+                response = requests.post(upload_url, json=payload, headers=headers)
+                response.raise_for_status()
+                json_response = response.json()
+                print(f"Upload API response: {json_response}")
+                final_photo_url = json_response.get("data")
+                if not final_photo_url:
+                    raise GraphQLError("No URL returned from upload service.")
+                existing_user.encoded_image = final_photo_url
+            except requests.exceptions.RequestException as e:
+                print(f"Request failed: {e}")
+                raise GraphQLError("Failed to upload photo.")
+
+        db_session.commit()
+        return existing_user
 
 class EnterGiveaway(graphene.Mutation):
     class Arguments:
@@ -598,6 +666,7 @@ def mutate(self, info, user_id):
 class Mutation(graphene.ObjectType):
     create_giveaway = CreateGiveaway.Field(description="Creates a new giveaway.")
     create_user = CreateUser.Field(description="Creates a new user.")
+    edit_user = EditUser.Field(description="Edits an existing user.")
     enter_giveaway = EnterGiveaway.Field(description="Enters a user into a giveaway.")
     set_workout_goals = SetWorkoutGoals.Field(description="Set a user's workout goals.")
     log_workout = logWorkout.Field(description="Log a user's workout.")
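For reference, the encodedImage argument these user mutations accept is a base64 string that the resolver forwards to the upload service. A hypothetical end-to-end call (endpoint URL, file path, and field values are illustrative assumptions, not part of the patch):

    import base64
    import requests

    # Encode a local image the way a client would before calling createUser.
    with open("avatar.png", "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")

    query = """
    mutation CreateUser($img: String) {
      createUser(name: "Test User", netId: "tst1", email: "tst1@cornell.edu", encodedImage: $img) {
        id
        encodedImage
      }
    }
    """
    r = requests.post("http://localhost:5000/graphql",
                      json={"query": query, "variables": {"img": encoded}})
    print(r.json())  # encodedImage should hold the URL returned by the upload service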