From a147b09c3380d6a7af84476af9d702cbc3a025ac Mon Sep 17 00:00:00 2001 From: Michael P Date: Sun, 21 Feb 2016 13:07:33 -0800 Subject: [PATCH 01/33] Fix elevation, get additional data like cadence, heart rate, elevations, formatting changes for input to Google Spreadsheets. --- gcexport.py | 147 +++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 116 insertions(+), 31 deletions(-) diff --git a/gcexport.py b/gcexport.py index 2c6fec7..250fe99 100755 --- a/gcexport.py +++ b/gcexport.py @@ -1,9 +1,12 @@ #!/usr/bin/python +# -*- coding: utf-8 -*- """ File: gcexport.py -Author: Kyle Krafka (https://github.com/kjkjava/) +Original author: Kyle Krafka (https://github.com/kjkjava/) Date: April 28, 2015 +Fork author: Michae P (https://github.com/moderation/) +Date: February 21, 2016 Description: Use this script to export your fitness data from Garmin Connect. See README.md for more information. @@ -129,7 +132,67 @@ def http_req(url, post=None, headers={}): # Write header to CSV file if not csv_existed: - csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n') + csv_file.write('Activity name,\ +Description,\ +Begin timestamp,\ +Duration (h:m:s),\ +Moving duration (h:m:s),\ +Distance (km),\ +Average speed (km/h),\ +Average moving speed (km/h),\ +Max. speed (km/h),\ +Elevation loss uncorrected (m),\ +Elevation gain uncorrected (m),\ +Elevation min. uncorrected (m),\ +Elevation max. uncorrected (m),\ +Min. heart rate (bpm),\ +Max. heart rate (bpm),\ +Average heart rate (bpm),\ +Calories,\ +Avg. cadence (rpm),\ +Max. cadence (rpm),\ +Strokes,\ +Avg. temp (°C),\ +Min. temp (°C),\ +Max. temp (°C),\ +Map,\ +End timestamp,\ +Begin timestamp (ms),\ +End timestamp (ms),\ +Device,\ +Activity type,\ +Event type,\ +Time zone,\ +Begin latitude (°DD),\ +Begin longitude (°DD),\ +End latitude (°DD),\ +End longitude (°DD),\ +Elevation gain corrected (m),\ +Elevation loss corrected (m),\ +Elevation max. corrected (m),\ +Elevation min. corrected (m),\ +Sample count\n') + + +# Max. Elevation,\ +# Average Moving Speed,\ +# Max. Speed,\ +# Calories,\ +# Duration (Raw Seconds),\ +# Moving Duration (Raw Seconds),\ +# Average Speed,\ +# Distance,\ +# Min. Elevation,\ +# Elevation Gain,\ +# Elevation Loss,\ +# Avg Cadence,\ +# Max Cadence,\ +# Avg Temp,\ +# Min Temp,\ +# Max Temp,\ +# Min. elevation (m),\ +# Max. elevation (m),\ +# Activity parent,\ download_all = False if args.count == 'all': @@ -152,9 +215,11 @@ def http_req(url, post=None, headers={}): search_params = {'start': total_downloaded, 'limit': num_to_download} # Query Garmin Connect + # print url_gc_search + urlencode(search_params) result = http_req(url_gc_search + urlencode(search_params)) + # print result json_results = json.loads(result) # TODO: Catch possible exceptions here. 
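For reference, each pass of the download loop above fetches one page of results by url-encoding the `start` and `limit` parameters onto the search endpoint; the commented-out print shows the URL being requested. A minimal sketch of what that URL looks like (the endpoint string is copied from the script; the parameter values and their order are illustrative):

try:
    from urllib import urlencode          # Python 2, matching the script's imports
except ImportError:
    from urllib.parse import urlencode    # Python 3

url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?'
search_params = {'start': 0, 'limit': 100}
print(url_gc_search + urlencode(search_params))
# e.g. ...json/activities?start=0&limit=100 (parameter order may vary)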
- + search = json_results['results']['search'] @@ -239,49 +304,69 @@ def http_req(url, post=None, headers={}): csv_record = '' - csv_record += empty_record if 'activityId' not in a['activity'] else '"' + a['activity']['activityId'].replace('"', '""') + '",' csv_record += empty_record if 'activityName' not in a['activity'] else '"' + a['activity']['activityName']['value'].replace('"', '""') + '",' csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + a['activity']['activityDescription']['value'].replace('"', '""') + '",' csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",' + csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else a['activity']['sumElapsedDuration']['display'].replace('"', '""') + ',' + csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else a['activity']['sumMovingDuration']['display'].replace('"', '""') + ',' + csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'lossUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['lossUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'gainUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['gainUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'minUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['minUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'maxUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['maxUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'minHeartRate' not in a['activity'] else '"' + a['activity']['minHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'weightedMeanBikeCadence' not in a['activity'] else '"' + a['activity']['weightedMeanBikeCadence']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'maxBikeCadence' not in a['activity'] else '"' + a['activity']['maxBikeCadence']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'sumStrokes' not in a['activity'] else '"' + a['activity']['sumStrokes']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'weightedMeanAirTemperature' not in a['activity'] else '"' + 
a['activity']['weightedMeanAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'minAirTemperature' not in a['activity'] else '"' + a['activity']['minAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'maxAirTemperature' not in a['activity'] else '"' + a['activity']['maxAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'activityId' not in a['activity'] else '"https://connect.garmin.com/modern/activity/' + a['activity']['activityId'].replace('"', '""') + '",' csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",' csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['millis'].replace('"', '""') + '",' csv_record += empty_record if 'device' not in a['activity'] else '"' + a['activity']['device']['display'].replace('"', '""') + ' ' + a['activity']['device']['version'].replace('"', '""') + '",' - csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['parent']['display'].replace('"', '""') + '",' csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['display'].replace('"', '""') + '",' csv_record += empty_record if 'eventType' not in a['activity'] else '"' + a['activity']['eventType']['display'].replace('"', '""') + '",' csv_record += empty_record if 'activityTimeZone' not in a['activity'] else '"' + a['activity']['activityTimeZone']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",' csv_record += empty_record if 'beginLatitude' not in a['activity'] else '"' + a['activity']['beginLatitude']['value'].replace('"', '""') + '",' csv_record += empty_record if 'beginLongitude' not in a['activity'] else '"' + a['activity']['beginLongitude']['value'].replace('"', '""') + '",' csv_record += empty_record if 'endLatitude' not in a['activity'] else '"' + a['activity']['endLatitude']['value'].replace('"', '""') + '",' csv_record += empty_record if 'endLongitude' not in a['activity'] else '"' + a['activity']['endLongitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['display'].replace('"', '""') + '",' # The units vary between 
Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'minHeartRate' not in a['activity'] else '"' + a['activity']['minHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['value'].replace('"', '""') + '"' + csv_record += empty_record if 'gainCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['gainCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'lossCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['lossCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'maxCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['maxCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'minCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['minCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'sumSampleCountDuration' not in a['activity'] else '"' + a['activity']['sumSampleCountDuration']['value'].replace('"', '""') + '"' csv_record += 
'\n' +# csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",' +# csv_record += empty_record if 'minElevation' not in a['activity'] else '"' + a['activity']['minElevation']['value'].replace('"', '""') + '",' +# csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",' +# csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" +# csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" +# csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['display'].replace('"', '""') + '",' +# csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['value'].replace('"', '""') + '",' +# csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['value'].replace('"', '""') + '",' +# csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'minElevation' not in a['activity'] else '"' + a['activity']['minElevation']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['withUnit'].replace('"', '""') + '",' +# csv_record += empty_record if 'weightedMeanBikeCadence' not in a['activity'] else '"' + a['activity']['weightedMeanBikeCadence']['withUnitAbbr'].replace('"', '""') + '",' +# csv_record += empty_record if 'maxBikeCadence' not in a['activity'] else '"' + a['activity']['maxBikeCadence']['withUnitAbbr'].replace('"', '""') + '",' +# csv_record += empty_record if 'weightedMeanAirTemperature' not in a['activity'] else '"' + a['activity']['weightedMeanAirTemperature']['withUnitAbbr'].replace('"', '""') + '",' +# csv_record += empty_record if 'minAirTemperature' not in a['activity'] else '"' + a['activity']['minAirTemperature']['withUnitAbbr'].replace('"', '""') + '",' +# csv_record += empty_record if 'maxAirTemperature' not in a['activity'] else '"' + a['activity']['maxAirTemperature']['withUnitAbbr'].replace('"', '""') + '",' +# csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['parent']['display'].replace('"', '""') + '",' + csv_file.write(csv_record.encode('utf8')) if args.format == 'gpx': From 14936047cfba5f35bf74ad9854e76a308710ccf7 Mon Sep 17 00:00:00 2001 From: Michael P Date: Wed, 31 Aug 2016 13:36:01 -0700 Subject: [PATCH 02/33] Huge refactor after Garmin deprecated old 
APIs and modified login and authentication requirements. --- gcexport.py | 196 ++++++++++++++++++--------- old/garmin-connect-export.php | 245 ---------------------------------- 2 files changed, 135 insertions(+), 306 deletions(-) delete mode 100644 old/garmin-connect-export.php diff --git a/gcexport.py b/gcexport.py index 250fe99..4e7dddf 100755 --- a/gcexport.py +++ b/gcexport.py @@ -21,8 +21,9 @@ from os import mkdir from os import remove from xml.dom.minidom import parseString +from subprocess import call -import urllib2, cookielib, json +import urllib, urllib2, cookielib, json from fileinput import filename import argparse @@ -61,18 +62,26 @@ cookie_jar = cookielib.CookieJar() opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar)) +# print cookie_jar # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. def http_req(url, post=None, headers={}): request = urllib2.Request(url) - request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. + # request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. + request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2816.0 Safari/537.36') # Tell Garmin we're some supported browser. for header_key, header_value in headers.iteritems(): request.add_header(header_key, header_value) if post: + # print "POSTING" post = urlencode(post) # Convert dictionary to POST parameter string. + # print request.headers + # print cookie_jar + # print post + # print request response = opener.open(request, data=post) # This line may throw a urllib2.HTTPError. # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below. + # print response.getcode() if response.getcode() != 200: raise Exception('Bad return code (' + response.getcode() + ') for: ' + url) @@ -90,36 +99,94 @@ def http_req(url, post=None, headers={}): # Maximum number of activities you can request at once. Set and enforced by Garmin. limit_maximum = 100 +hostname_url = http_req('http://connect.garmin.com/gauth/hostname') +# print hostname_url +hostname = json.loads(hostname_url)['host'] + +REDIRECT = "https://connect.garmin.com/post-auth/login" +BASE_URL = "http://connect.garmin.com/en-US/signin" +GAUTH = "http://connect.garmin.com/gauth/hostname" +SSO = "https://sso.garmin.com/sso" +CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.1-min.css" + +data = {'service': REDIRECT, + 'webhost': hostname, + 'source': BASE_URL, + 'redirectAfterAccountLoginUrl': REDIRECT, + 'redirectAfterAccountCreationUrl': REDIRECT, + 'gauthHost': SSO, + 'locale': 'en_US', + 'id': 'gauth-widget', + 'cssUrl': CSS, + 'clientId': 'GarminConnect', + 'rememberMeShown': 'true', + 'rememberMeChecked': 'false', + 'createAccountShown': 'true', + 'openCreateAccount': 'false', + 'usernameShown': 'false', + 'displayNameShown': 'false', + 'consumeServiceTicket': 'false', + 'initialFocus': 'true', + 'embedWidget': 'false', + 'generateExtraServiceTicket': 'false'} + +print urllib.urlencode(data) + # URLs for various services. 
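The hostname request above replaces the value that used to be hard-coded into the login URL (`webhost=olaxpw-connect04` in the old query string): the /gauth/hostname endpoint returns a small JSON document whose `host` field is dropped into the `webhost` parameter of the `data` dict. A minimal sketch of that parsing step, with a sample payload that is an assumption modelled on the old hard-coded value:

import json

sample = '{"host": "olaxpw-connect04"}'   # assumed shape of the /gauth/hostname response
hostname = json.loads(sample)['host']
print(hostname)  # olaxpw-connect04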
-url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' +# url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' +url_gc_login = 'https://sso.garmin.com/sso/login?' + urllib.urlencode(data) url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?' -url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?' -url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/' -url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/' +url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' +url_gc_gpx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/' +url_gc_tcx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/' url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/' +# url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?' +# url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/gpx/activity/' +# url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/tcx/activity/' # Initially, we need to get a valid session cookie, so we pull the login page. +print 'Request login page' http_req(url_gc_login) +print 'Finish login page' # Now we'll actually login. post_data = {'username': username, 'password': password, 'embed': 'true', 'lt': 'e1s1', '_eventId': 'submit', 'displayNameRequired': 'false'} # Fields that are passed in a typical Garmin login. +print 'Post login data' http_req(url_gc_login, post_data) +print 'Finish login post' # Get the key. # TODO: Can we do this without iterating? login_ticket = None +print "-------COOKIE" for cookie in cookie_jar: + print cookie.name + ": " + cookie.value if cookie.name == 'CASTGC': login_ticket = cookie.value + print login_ticket + print cookie.value break +print "-------COOKIE" if not login_ticket: raise Exception('Did not get a ticket cookie. Cannot log in. 
Did you enter the correct username and password?') # Chop of 'TGT-' off the beginning, prepend 'ST-0'. login_ticket = 'ST-0' + login_ticket[4:] +# print login_ticket +print 'Request authentication' +# print url_gc_post_auth + 'ticket=' + login_ticket http_req(url_gc_post_auth + 'ticket=' + login_ticket) +print 'Finished authentication' + +# https://github.com/kjkjava/garmin-connect-export/issues/18#issuecomment-243859319 +print "Call modern" +http_req("http://connect.garmin.com/modern") +print "Finish modern" +print "Call legacy session" +http_req("https://connect.garmin.com/legacy/session") +print "Finish legacy session" # We should be logged in now. if not isdir(args.directory): @@ -215,17 +282,18 @@ def http_req(url, post=None, headers={}): search_params = {'start': total_downloaded, 'limit': num_to_download} # Query Garmin Connect - # print url_gc_search + urlencode(search_params) + print "Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + print url_gc_search + urlencode(search_params) result = http_req(url_gc_search + urlencode(search_params)) - # print result + print "Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + print result json_results = json.loads(result) # TODO: Catch possible exceptions here. - - search = json_results['results']['search'] + # search = json_results['results']['search'] if download_all: # Modify total_to_download based on how many activities the server reports. - total_to_download = int(search['totalFound']) + total_to_download = int(json_results['totalFound']) # Do it only once. download_all = False @@ -235,30 +303,32 @@ def http_req(url, post=None, headers={}): # Process each activity. for a in activities: # Display which entry we're working on. - print 'Garmin Connect activity: [' + a['activity']['activityId'] + ']', - print a['activity']['activityName']['value'] - print '\t' + a['activity']['beginTimestamp']['display'] + ',', - if 'sumElapsedDuration' in a['activity']: - print a['activity']['sumElapsedDuration']['display'] + ',', + print 'Garmin Connect activity: [' + str(a['activity']['activityId']) + ']', + print a['activity']['activityName'] + print '\t' + a['activity']['activitySummary']['BeginTimestamp']['display'] + ',', + if 'SumElapsedDuration' in a['activity']['activitySummary']: + print a['activity']['activitySummary']['SumElapsedDuration']['display'] + ',', else: print '??:??:??,', - if 'sumDistance' in a['activity']: - print a['activity']['sumDistance']['withUnit'] + if 'SumDistance' in a['activity']['activitySummary']: + print a['activity']['activitySummary']['SumDistance']['withUnit'] else: print '0.00 Miles' if args.format == 'gpx': - data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.gpx' - download_url = url_gc_gpx_activity + a['activity']['activityId'] + '?full=true' + data_filename = args.directory + '/activity_' + str(a['activity']['activityId']) + '.gpx' + download_url = url_gc_gpx_activity + str(a['activity']['activityId']) + '?full=true' + # download_url = url_gc_gpx_activity + str(a['activity']['activityId']) + '?full=true' + '&original=true' + print download_url file_mode = 'w' elif args.format == 'tcx': - data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.tcx' - download_url = url_gc_tcx_activity + a['activity']['activityId'] + '?full=true' + data_filename = args.directory + '/activity_' + str(a['activity']['activityId']) + '.tcx' + download_url = url_gc_tcx_activity + str(a['activity']['activityId']) + '?full=true' file_mode = 'w' elif 
args.format == 'original': - data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.zip' - fit_filename = args.directory + '/' + a['activity']['activityId'] + '.fit' - download_url = url_gc_original_activity + a['activity']['activityId'] + data_filename = args.directory + '/activity_' + str(a['activity']['activityId']) + '.zip' + fit_filename = args.directory + '/' + str(a['activity']['activityId']) + '.fit' + download_url = url_gc_original_activity + str(a['activity']['activityId']) file_mode = 'wb' else: raise Exception('Unrecognized format.') @@ -304,46 +374,46 @@ def http_req(url, post=None, headers={}): csv_record = '' - csv_record += empty_record if 'activityName' not in a['activity'] else '"' + a['activity']['activityName']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + a['activity']['activityDescription']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else a['activity']['sumElapsedDuration']['display'].replace('"', '""') + ',' - csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else a['activity']['sumMovingDuration']['display'].replace('"', '""') + ',' - csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'lossUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['lossUncorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'gainUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['gainUncorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'minUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['minUncorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'maxUncorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['maxUncorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'minHeartRate' not in a['activity'] else '"' + a['activity']['minHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanBikeCadence' not in a['activity'] else '"' + a['activity']['weightedMeanBikeCadence']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'maxBikeCadence' not in a['activity'] else '"' + 
a['activity']['maxBikeCadence']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumStrokes' not in a['activity'] else '"' + a['activity']['sumStrokes']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanAirTemperature' not in a['activity'] else '"' + a['activity']['weightedMeanAirTemperature']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'minAirTemperature' not in a['activity'] else '"' + a['activity']['minAirTemperature']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'maxAirTemperature' not in a['activity'] else '"' + a['activity']['maxAirTemperature']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'activityId' not in a['activity'] else '"https://connect.garmin.com/modern/activity/' + a['activity']['activityId'].replace('"', '""') + '",' - csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",' - csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['millis'].replace('"', '""') + '",' + csv_record += empty_record if 'activityName' not in a['activity'] else '"' + a['activity']['activityName'].replace('"', '""') + '",' + csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + a['activity']['activityDescription'].replace('"', '""') + '",' + csv_record += empty_record if 'BeginTimestamp' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['BeginTimestamp']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'SumElapsedDuration' not in a['activity']['activitySummary'] else a['activity']['activitySummary']['SumElapsedDuration']['display'].replace('"', '""') + ',' + csv_record += empty_record if 'SumMovingDuration' not in a['activity']['activitySummary'] else a['activity']['activitySummary']['SumMovingDuration']['display'].replace('"', '""') + ',' + csv_record += empty_record if 'SumDistance' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['SumDistance']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'WeightedMeanSpeed' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['WeightedMeanSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'WeightedMeanMovingSpeed' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['WeightedMeanMovingSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'MaxSpeed' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MaxSpeed']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'LossUncorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['LossUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'GainUncorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['GainUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'MinUncorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['MinUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 
'MaxUncorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['MaxUncorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'MinHeartRate' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MinHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'MaxHeartRate' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MaxHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'WeightedMeanHeartRate' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['WeightedMeanHeartRate']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'SumEnergy' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['SumEnergy']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'WeightedMeanBikeCadence' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['WeightedMeanBikeCadence']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'MaxBikeCadence' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MaxBikeCadence']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'SumStrokes' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['SumStrokes']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'WeightedMeanAirTemperature' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['WeightedMeanAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'MinAirTemperature' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MinAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'MaxAirTemperature' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['MaxAirTemperature']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'activityId' not in a['activity'] else '"https://connect.garmin.com/modern/activity/' + str(a['activity']['activityId']).replace('"', '""') + '",' + csv_record += empty_record if 'EndTimestamp' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['EndTimestamp']['display'].replace('"', '""') + '",' + csv_record += empty_record if 'BeginTimestamp' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['BeginTimestamp']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'EndTimestamp' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['EndTimestamp']['value'].replace('"', '""') + '",' csv_record += empty_record if 'device' not in a['activity'] else '"' + a['activity']['device']['display'].replace('"', '""') + ' ' + a['activity']['device']['version'].replace('"', '""') + '",' csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['display'].replace('"', '""') + '",' csv_record += empty_record if 'eventType' not in a['activity'] else '"' + a['activity']['eventType']['display'].replace('"', '""') + '",' csv_record += empty_record if 'activityTimeZone' not in a['activity'] else '"' + a['activity']['activityTimeZone']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'beginLatitude' not in a['activity'] else '"' + 
a['activity']['beginLatitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'beginLongitude' not in a['activity'] else '"' + a['activity']['beginLongitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'endLatitude' not in a['activity'] else '"' + a['activity']['endLatitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'endLongitude' not in a['activity'] else '"' + a['activity']['endLongitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'gainCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['gainCorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'lossCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['lossCorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'maxCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['maxCorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'minCorrectedElevation' not in a['activity'] else '"' + str(float(a['activity']['minCorrectedElevation']['value'])/100) + '",' - csv_record += empty_record if 'sumSampleCountDuration' not in a['activity'] else '"' + a['activity']['sumSampleCountDuration']['value'].replace('"', '""') + '"' + csv_record += empty_record if 'BeginLatitude' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['BeginLatitude']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'BeginLongitude' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['BeginLongitude']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'EndLatitude' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['EndLatitude']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'EndLongitude' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['EndLongitude']['value'].replace('"', '""') + '",' + csv_record += empty_record if 'GainCorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['GainCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'LossCorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['LossCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'MaxCorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['MaxCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'MinCorrectedElevation' not in a['activity']['activitySummary'] else '"' + str(float(a['activity']['activitySummary']['MinCorrectedElevation']['value'])/100) + '",' + csv_record += empty_record if 'SumSampleCountDuration' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['SumSampleCountDuration']['value'].replace('"', '""') + '"' csv_record += '\n' # csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",' @@ -398,4 +468,8 @@ def http_req(url, post=None, headers={}): csv_file.close() +print 'Open CSV output.' +print csv_filename +# call(["open", csv_filename]) + print 'Done!' 
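All of the csv_record lines above follow one convention: emit the empty placeholder when the key is missing, otherwise double any embedded quotes and wrap the value in quotes, with the elevation fields additionally divided by 100 (the raw values appear to be centimetres). A minimal sketch of that convention as a helper, using illustrative field names; the script itself writes each field inline with its own empty_record constant:

def csv_field(summary, key, scale=None):
    # Empty placeholder when the key is absent (stands in for the script's empty_record).
    if key not in summary:
        return ','
    value = summary[key]['value']
    if scale:
        value = str(float(value) / scale)
    # Double embedded quotes and wrap the field, CSV-style.
    return '"' + value.replace('"', '""') + '",'

summary = {'GainUncorrectedElevation': {'value': '12345'}}
print(csv_field(summary, 'GainUncorrectedElevation', scale=100))  # "123.45",
print(csv_field(summary, 'MaxHeartRate'))                         # ,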
diff --git a/old/garmin-connect-export.php b/old/garmin-connect-export.php deleted file mode 100644 index ab55c3d..0000000 --- a/old/garmin-connect-export.php +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/php - 1 && ( is_numeric( $argv[1] ) ) ) { - $total_to_download = $argv[1]; -} else if ( $argc > 1 && strcasecmp($argv[1], "all") == 0 ) { - // If the user wants to download all activities, first download one, - // then the result of that request will tell us how many are available - // so we will modify the variables then. - $total_to_download = 1; - $download_all = true; -} else { - $total_to_download = 1; -} -$total_downloaded = 0; - -// This while loop will download data from the server in multiple chunks, if necessary -while( $total_downloaded < $total_to_download ) { - $num_to_download = ($total_to_download - $total_downloaded > 100) ? 100 : ($total_to_download - $total_downloaded); // Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. - - // Query Garmin Connect - $search_opts = array( - 'start' => $total_downloaded, - 'limit' => $num_to_download - ); - - $result = curl( $urlGCSearch . http_build_query( $search_opts ) ); - $json = json_decode( $result ); - - if ( ! $json ) { - echo "Error: "; - switch(json_last_error()) { - case JSON_ERROR_DEPTH: - echo ' - Maximum stack depth exceeded'; - break; - case JSON_ERROR_CTRL_CHAR: - echo ' - Unexpected control character found'; - break; - case JSON_ERROR_SYNTAX: - echo ' - Syntax error, malformed JSON'; - break; - } - echo PHP_EOL; - var_dump( $result ); - die(); - } - - $search = $json->{'results'}->{'search'}; - - if ( $download_all ) { - // Modify $total_to_download based on how many activities the server reports - $total_to_download = intval( $search->{'totalFound'} ); - // Do it only once - $download_all = false; - } - - // Pull out just the list of activities - $activities = $json->{'results'}->{'activities'}; - - // Process each activity. - foreach ( $activities as $a ) { - // Display which entry we're working on. - print "Garmin Connect activity: [" . $a->{'activity'}->{'activityId'} . "] "; - print $a->{'activity'}->{'beginTimestamp'}->{'display'} . ": "; - print $a->{'activity'}->{'activityName'}->{'value'} . "\n"; - - // Write data to CSV - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityId'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityName'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityDescription'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'beginTimestamp'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'beginTimestamp'}->{'millis'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'endTimestamp'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'endTimestamp'}->{'millis'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'device'}->{'display'} . " " . $a->{'activity'}->{'device'}->{'version'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityType'}->{'parent'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityType'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . 
str_replace("\"", "\"\"", $a->{'activity'}->{'eventType'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'activityTimeZone'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxElevation'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxElevation'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'beginLatitude'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'beginLongitude'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'endLatitude'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'endLongitude'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'weightedMeanMovingSpeed'}->{'display'}) . "\"," ); // The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'weightedMeanMovingSpeed'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxHeartRate'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'weightedMeanHeartRate'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxSpeed'}->{'display'}) . "\"," ); // The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxSpeed'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumEnergy'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumEnergy'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumElapsedDuration'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumElapsedDuration'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumMovingDuration'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumMovingDuration'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'weightedMeanSpeed'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'weightedMeanSpeed'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumDistance'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'sumDistance'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'minHeartRate'}->{'display'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxElevation'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'maxElevation'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'gainElevation'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'gainElevation'}->{'value'}) . "\"," ); - fwrite( $csv_file, "\"" . 
str_replace("\"", "\"\"", $a->{'activity'}->{'lossElevation'}->{'withUnit'}) . "\"," ); - fwrite( $csv_file, "\"" . str_replace("\"", "\"\"", $a->{'activity'}->{'lossElevation'}->{'value'}) . "\""); - fwrite( $csv_file, "\n"); - - // Download the GPX file from Garmin Connect - // TODO: Consider using TCX files? Does Garmin Connect include heart rate data in TCX downloads? - print "\tDownloading GPX file... "; - - $gpx_filename = $activities_directory . '/activity_' . $a->{'activity'}->{'activityId'} . '.gpx'; - $save_file = fopen( $gpx_filename, 'w+' ); - $curl_opts = array( - CURLOPT_FILE => $save_file - ); - curl( $urlGCActivity . $a->{'activity'}->{'activityId'} . '?full=true', array(), array(), $curl_opts ); - fclose( $save_file ); - - // Validate the GPX data. If we have an activity without GPS data (e.g. running on a treadmill), - // Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data. - $gpx = simplexml_load_file( $gpx_filename, 'SimpleXMLElement', LIBXML_NOCDATA ); - $gpxdataexists = ( count( $gpx->trk->trkseg->trkpt ) > 0); - - if ( $gpxdataexists ) { - print "Done. GPX data saved.\n"; - } else { - print "Done. No track points found.\n"; - } - } - - $total_downloaded += $num_to_download; - -// End while loop for multiple chunks -} - -fclose($csv_file); - -print "Done!\n\n"; -// End - -function curl( $url, $post = array(), $head = array(), $opts = array() ) -{ - $cookie_file = '/tmp/cookies.txt'; - $ch = curl_init(); - - //curl_setopt( $ch, CURLOPT_VERBOSE, 1 ); - curl_setopt( $ch, CURLOPT_URL, $url ); - curl_setopt( $ch, CURLOPT_RETURNTRANSFER, 1 ); - curl_setopt( $ch, CURLOPT_ENCODING, "gzip" ); - curl_setopt( $ch, CURLOPT_COOKIEFILE, $cookie_file ); - curl_setopt( $ch, CURLOPT_COOKIEJAR, $cookie_file ); - curl_setopt( $ch, CURLOPT_FOLLOWLOCATION, 1 ); - - foreach ( $opts as $k => $v ) { - curl_setopt( $ch, $k, $v ); - } - - if ( count( $post ) > 0 ) { - // POST mode - curl_setopt( $ch, CURLOPT_POST, 1 ); - curl_setopt( $ch, CURLOPT_POSTFIELDS, $post ); - } - else { - curl_setopt( $ch, CURLOPT_HTTPHEADER, $head ); - curl_setopt( $ch, CURLOPT_CRLF, 1 ); - } - - $success = curl_exec( $ch ); - - if ( curl_errno( $ch ) !== 0 ) { - throw new Exception( sprintf( '%s: CURL Error %d: %s', __CLASS__, curl_errno( $ch ), curl_error( $ch ) ) ); - } - - if ( curl_getinfo( $ch, CURLINFO_HTTP_CODE ) !== 200 ) { - if ( curl_getinfo( $ch, CURLINFO_HTTP_CODE ) !== 201 ) { - throw new Exception( sprintf( 'Bad return code(%1$d) for: %2$s', curl_getinfo( $ch, CURLINFO_HTTP_CODE ), $url ) ); - } - } - - curl_close( $ch ); - return $success; -} - -?> From ceec54b99a95bced5ad219f836de6ae4558ca2a3 Mon Sep 17 00:00:00 2001 From: Chris McCarty Date: Wed, 7 Sep 2016 18:06:27 -0400 Subject: [PATCH 03/33] Fixed keyError when parsing totalFound results --- gcexport.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gcexport.py b/gcexport.py index 4e7dddf..22d1a5a 100755 --- a/gcexport.py +++ b/gcexport.py @@ -293,7 +293,8 @@ def http_req(url, post=None, headers={}): if download_all: # Modify total_to_download based on how many activities the server reports. - total_to_download = int(json_results['totalFound']) + total_to_download = int(json_results['results']['totalFound']) + # Do it only once. download_all = False From 9d2a62bbeab978c4efdf5b55bb0cdb06ecf04ea2 Mon Sep 17 00:00:00 2001 From: Michael P Date: Wed, 19 Oct 2016 10:43:41 +1030 Subject: [PATCH 04/33] GPX broken again. 
--- gcexport.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gcexport.py b/gcexport.py index 22d1a5a..fcb1369 100755 --- a/gcexport.py +++ b/gcexport.py @@ -44,7 +44,7 @@ parser.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to download, or 'all' (default: 1)") -parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", +parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="tcx", help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") parser.add_argument('-d', '--directory', nargs='?', default=activities_directory, @@ -133,13 +133,13 @@ def http_req(url, post=None, headers={}): print urllib.urlencode(data) # URLs for various services. -# url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' url_gc_login = 'https://sso.garmin.com/sso/login?' + urllib.urlencode(data) url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?' url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' url_gc_gpx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/' url_gc_tcx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/' url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/' +# url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' # url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?' # url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/gpx/activity/' # url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/tcx/activity/' @@ -294,7 +294,7 @@ def http_req(url, post=None, headers={}): if download_all: # Modify total_to_download based on how many activities the server reports. total_to_download = int(json_results['results']['totalFound']) - + # Do it only once. 
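The `totalFound` lookup in the hunk above reflects the shape of the activity-search-service-1.2 response: the count sits directly under `results`, not under `results.search` as the old 1.0 service had it. A minimal sketch of the parsing this assumes (the sample payload is illustrative):

import json

sample = '{"results": {"totalFound": 42, "activities": []}}'  # assumed 1.2 response shape
json_results = json.loads(sample)
total_to_download = int(json_results['results']['totalFound'])
print(total_to_download)  # 42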
download_all = False From c21bbb7c5726a8a9b97c3c84ea55b5bfd2babc82 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Wed, 30 Nov 2016 13:45:50 -0800 Subject: [PATCH 05/33] Persist activity JSON. --- gcexport.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/gcexport.py b/gcexport.py index fcb1369..c02fbd4 100755 --- a/gcexport.py +++ b/gcexport.py @@ -286,7 +286,13 @@ def http_req(url, post=None, headers={}): print url_gc_search + urlencode(search_params) result = http_req(url_gc_search + urlencode(search_params)) print "Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - print result + + # Persist JSON + json_filename = args.directory + '/activities.json' + json_file = open(json_filename, 'a') + json_file.write(result) + json_file.close() + json_results = json.loads(result) # TODO: Catch possible exceptions here. # search = json_results['results']['search'] From f1d3a34996c4ff078e07c72a8fb432e07efe6f68 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Wed, 30 Nov 2016 13:47:27 -0800 Subject: [PATCH 06/33] Re-enable export to GPX. --- gcexport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcexport.py b/gcexport.py index c02fbd4..47c5088 100755 --- a/gcexport.py +++ b/gcexport.py @@ -44,7 +44,7 @@ parser.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to download, or 'all' (default: 1)") -parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="tcx", +parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") parser.add_argument('-d', '--directory', nargs='?', default=activities_directory, From 64c0ff1e4d0589423548569c64485f88f5c51840 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Mon, 23 Jan 2017 09:05:29 -0800 Subject: [PATCH 07/33] Should fix int and str concatenation issue reported at https://github.com/kjkjava/garmin-connect-export/issues/26. --- gcexport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcexport.py b/gcexport.py index 47c5088..4438f29 100755 --- a/gcexport.py +++ b/gcexport.py @@ -83,7 +83,7 @@ def http_req(url, post=None, headers={}): # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below. # print response.getcode() if response.getcode() != 200: - raise Exception('Bad return code (' + response.getcode() + ') for: ' + url) + raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) return response.read() From 23e2aaa75ce168c3620bd6e5cafde846c96868c9 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Fri, 22 Sep 2017 09:55:43 -0700 Subject: [PATCH 08/33] Clean up old endpoints and improve HTTP error reporting. 
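The str() wrapper added in patch 07 above avoids a TypeError: response.getcode() returns an int, and concatenating an int onto a string raises an exception before the intended error message is ever built. A minimal illustration of the failure and the patched form (the URL is illustrative):

url = 'http://example.com/activity'  # illustrative
code = 404                           # what response.getcode() returns (an int)
try:
    msg = 'Bad return code (' + code + ') for: ' + url        # old form: raises TypeError
except TypeError:
    msg = 'Bad return code (' + str(code) + ') for: ' + url   # patched form
print(msg)  # Bad return code (404) for: http://example.com/activity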
--- gcexport.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/gcexport.py b/gcexport.py index 4438f29..9403ff9 100755 --- a/gcexport.py +++ b/gcexport.py @@ -139,10 +139,6 @@ def http_req(url, post=None, headers={}): url_gc_gpx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/' url_gc_tcx_activity = 'https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/' url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/' -# url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' -# url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?' -# url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/gpx/activity/' -# url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.2/tcx/activity/' # Initially, we need to get a valid session cookie, so we pull the login page. print 'Request login page' @@ -370,7 +366,7 @@ def http_req(url, post=None, headers={}): print 'Writing empty file since there was no original activity data...', data = '' else: - raise Exception('Failed. Got an unexpected HTTP error (' + str(e.code) + ').') + raise Exception('Failed. Got an unexpected HTTP error (' + str(e.code) + download_url +').') save_file = open(data_filename, file_mode) save_file.write(data) From 51122586d4abf2537d9be592e2b9b5dcdfb701c9 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Fri, 22 Sep 2017 10:18:29 -0700 Subject: [PATCH 09/33] Check zip file size & skip unzipping 0Kb files. --- gcexport.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/gcexport.py b/gcexport.py index 9403ff9..1445558 100755 --- a/gcexport.py +++ b/gcexport.py @@ -20,6 +20,7 @@ from os.path import isfile from os import mkdir from os import remove +from os import stat from xml.dom.minidom import parseString from subprocess import call @@ -456,11 +457,15 @@ def http_req(url, post=None, headers={}): elif args.format == 'original': if args.unzip and data_filename[-3:].lower() == 'zip': # Even manual upload of a GPX file is zipped, but we'll validate the extension. print "Unzipping and removing original files...", - zip_file = open(data_filename, 'rb') - z = zipfile.ZipFile(zip_file) - for name in z.namelist(): - z.extract(name, args.directory) - zip_file.close() + print 'Filesize is: ' + str(stat(data_filename).st_size) + if stat(data_filename).st_size > 0: + zip_file = open(data_filename, 'rb') + z = zipfile.ZipFile(zip_file) + for name in z.namelist(): + z.extract(name, args.directory) + zip_file.close() + else: + print 'Skipping 0Kb zip file.' remove(data_filename) print 'Done.' 
else: From 33bb8551da1b774371779768107ae7f0433c3a71 Mon Sep 17 00:00:00 2001 From: Peter Steiner Date: Tue, 6 Mar 2018 17:29:34 +0100 Subject: [PATCH 10/33] Use `limit_maximum` instead of magic number Reduce maximum to 19, as everything bigger gets a 500 status: Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?start=0&limit=1 Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Garmin Connect activity: [NNNNNNNNNN] (activity name) Tue, 6 Mar 2018 10:23, 00:57:44, 11.20 Kilometers https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/NNNNNNNNNN?full=true Downloading file... Done. GPX data saved. Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?start=1&limit=20 Traceback (most recent call last): File "./gcexport.py", line 284, in result = http_req(url_gc_search + urlencode(search_params)) File "./gcexport.py", line 82, in http_req ... File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 558, in http_error_default raise HTTPError(req.get_full_url(), code, msg, hdrs, fp) urllib2.HTTPError: HTTP Error 500: Internal Server Error --- gcexport.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/gcexport.py b/gcexport.py index 1445558..4e52722 100755 --- a/gcexport.py +++ b/gcexport.py @@ -98,7 +98,7 @@ def http_req(url, post=None, headers={}): password = args.password if args.password else getpass() # Maximum number of activities you can request at once. Set and enforced by Garmin. -limit_maximum = 100 +limit_maximum = 19 hostname_url = http_req('http://connect.garmin.com/gauth/hostname') # print hostname_url @@ -271,9 +271,10 @@ def http_req(url, post=None, headers={}): # This while loop will download data from the server in multiple chunks, if necessary. while total_downloaded < total_to_download: - # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. - if total_to_download - total_downloaded > 100: - num_to_download = 100 + # Maximum chunk size 'limit_maximum' ... 400 return status if over maximum. So download maximum or whatever remains if less than maximum. + # As of 2018-03-06 I get return status 500 if over maximum + if total_to_download - total_downloaded > limit_maximum: + num_to_download = limit_maximum else: num_to_download = total_to_download - total_downloaded From 10afedd5e56d131a14d5113368cc80764a15d427 Mon Sep 17 00:00:00 2001 From: Peter Steiner Date: Sun, 11 Mar 2018 11:43:17 +0100 Subject: [PATCH 11/33] Write empty GPX file when there's no GPS data No GPS data is indicated by HTTP status 204 (no content) (reapplying commit 404165f9 to current state) --- gcexport.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/gcexport.py b/gcexport.py index 4e52722..7efcebe 100755 --- a/gcexport.py +++ b/gcexport.py @@ -83,7 +83,12 @@ def http_req(url, post=None, headers={}): # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below. # print response.getcode() - if response.getcode() != 200: + if response.getcode() == 204: + # For activities without GPS coordinates, there is no GPX download (204 = no content). + # Write an empty file to prevent redownloading it. 
+ print 'Writing empty file since there was no GPX activity data...', + return '' + elif response.getcode() != 200: raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) return response.read() @@ -444,9 +449,9 @@ def http_req(url, post=None, headers={}): csv_file.write(csv_record.encode('utf8')) - if args.format == 'gpx': + if args.format == 'gpx' and data: # Validate GPX data. If we have an activity without GPS data (e.g., running on a treadmill), - # Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data. + # Garmin Connect still kicks out a GPX (sometimes), but there is only activity information, no GPS data. # N.B. You can omit the XML parse (and the associated log messages) to speed things up. gpx = parseString(data) gpx_data_exists = len(gpx.getElementsByTagName('trkpt')) > 0 From dcaffb9d87674e1473c48a637694eea3ade02dac Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Tue, 17 Apr 2018 07:35:48 -0700 Subject: [PATCH 12/33] Large refactor based on Garmin Connect API changes and the excellent fork and issues from https://github.com/pe-st/garmin-connect-export. Signed-off-by: Michael Payne --- README.md | 26 +- gcexport.py => gcexport2.py | 9 +- gcexport3.py | 527 ++++++++++++++++++++++++++++++++++++ 3 files changed, 552 insertions(+), 10 deletions(-) rename gcexport.py => gcexport2.py (98%) create mode 100755 gcexport3.py diff --git a/README.md b/README.md index 689cd72..7b4a3c7 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Usage You will need a little experience running things from the command line to use this script. That said, here are the usage details from the `--help` flag: ``` -usage: gcexport.py [-h] [--version] [--username [USERNAME]] +usage: gcexport3.py [-h] [--version] [--username [USERNAME]] [--password [PASSWORD]] [-c [COUNT]] [-f [{gpx,tcx,original}]] [-d [DIRECTORY]] [-u] @@ -43,11 +43,11 @@ optional arguments: ``` Examples: -`python gcexport.py --count all` will download all of your data to a dated directory. +`python gcexport3.py --count all` will download all of your data to a dated directory. -`python gcexport.py -d ~/MyActivities -c 3 -f original -u --username bobbyjoe --password bestpasswordever1` will download your three most recent activities in the FIT file format (or whatever they were uploaded as) into the `~/MyActivities` directory (unless they already exist). Using the `--username` and `--password` flags are not recommended because your password will be stored in your command line history. Instead, omit them to be prompted (and note that nothing will be displayed when you type your password). +`python gcexport3.py -d ~/MyActivities -c 3 -f original -u --username bobbyjoe --password bestpasswordever1` will download your three most recent activities in the FIT file format (or whatever they were uploaded as) into the `~/MyActivities` directory (unless they already exist). Using the `--username` and `--password` flags are not recommended because your password will be stored in your command line history. Instead, omit them to be prompted (and note that nothing will be displayed when you type your password). -Alternatively, you may run it with `./gcexport.py` if you set the file as executable (i.e., `chmod u+x gcexport.py`). +Alternatively, you may run it with `./gcexport3.py` if you set the file as executable (i.e., `chmod u+x gcexport3.py`). Of course, you must have Python installed to run this. Most Mac and Linux users should already have it. 
Also, as stated above, you should have some basic command line experience. @@ -63,9 +63,25 @@ Garmin Connect API ------------------ This script is for personal use only. It simulates a standard user session (i.e., in the browser), logging in using cookies and an authorization ticket. This makes the script pretty brittle. If you're looking for a more reliable option, particularly if you wish to use this for some production service, Garmin does offer a paid API service. +### REST endpoints + +As this script doesn't use the paid API, the endpoints it relies on are known only from reverse-engineering browser sessions. And as the Garmin Connect website changes over time, chances are that this script will break at some point. + +A short history of the endpoints used by `gcexport3.py` to get a list of activities: + +- [activity-search-service-1.0](https://connect.garmin.com/proxy/activity-search-service-1.0/json/activities): initial endpoint used since 2015, worked at least until January 2018 +- [activity-search-service-1.2](https://connect.garmin.com/proxy/activity-search-service-1.2/json/activities): endpoint introduced in `gcexport.py` in August 2016. In March 2018 this still works, but it doesn't allow you to fetch more than 20 activities, even split over multiple calls (when doing three consecutive calls with 1,19,19 as the `limit` parameter, the third one fails with HTTP error 500). The JSON returned by this endpoint, however, is quite rich (see the example in the `json` folder). +- [activitylist-service](https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities): endpoint introduced in `gcexport.py` in March 2018. The JSON returned by this endpoint is very different from the activity-search-service-1.2 one (again, see the example in the `json` folder), e.g. + - it is concise and offers no redundant information (e.g. only speed, not speed and pace) + - the units are not explicitly given and must be deduced (e.g. the speed unit is m/s); see the conversion sketch just before the Contributions section + - there is less information, e.g. there is only one set of elevation values (not both corrected and uncorrected), and other values like minimum heart rate are missing. + - some other information is available only as an ID (e.g. `timeZoneId` or `deviceId`), and the complete information might only be available via another REST call (I didn't reverse-engineer further for the time being) + History ------- -The original project was written in PHP (now in the `old` directory), based on "Garmin Connect export to Dailymile" code at http://www.ciscomonkey.net/gc-to-dm-export/ (link has been down for a while). It no longer works due to the way Garmin handles logins. It could be updated, but I decided to rewrite everything in Python for the latest version. +The original project was written in PHP (formerly in the `old` directory, now deleted), based on "Garmin Connect export to Dailymile" code at http://www.ciscomonkey.net/gc-to-dm-export/ (link has been down for a while). It no longer works due to the way Garmin handles logins. It could be updated, but I decided to rewrite everything in Python for the latest version. + +@moderation forked the original from @kjkjava when the various endpoints stopped working and the original repo wasn't being updated. This fork is primarily designed for my own use, which is cycling; it has not been well tested against other activity types. In the latest updates (April 2018) I've deprecated the Python 2 version (renamed to gcexport2.py) and this script now requires Python 3. The code has been linted using [pylint3](https://packages.debian.org/sid/pylint3).
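+As a concrete illustration of the unit handling mentioned under *REST endpoints* above: the values in these JSON responses (e.g. distance and speeds in the summary) are plain metric SI units, and `gcexport3.py` converts them before writing the CSV. A minimal sketch with made-up numbers (not part of the exporter itself):
+
+```
+# Hypothetical summary values: distance in metres, speed in metres/second.
+summary = {"distance": 24521.3, "averageSpeed": 6.94}
+
+distance_km = summary["distance"] / 1000      # 24.5213 km
+average_kmh = summary["averageSpeed"] * 3.6   # ~25.0 km/h
+
+print("{0:.5f} km at {1:.1f} km/h".format(distance_km, average_kmh))
+```
+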
Contributions ------------- diff --git a/gcexport.py b/gcexport2.py similarity index 98% rename from gcexport.py rename to gcexport2.py index 4e52722..1445558 100755 --- a/gcexport.py +++ b/gcexport2.py @@ -98,7 +98,7 @@ def http_req(url, post=None, headers={}): password = args.password if args.password else getpass() # Maximum number of activities you can request at once. Set and enforced by Garmin. -limit_maximum = 19 +limit_maximum = 100 hostname_url = http_req('http://connect.garmin.com/gauth/hostname') # print hostname_url @@ -271,10 +271,9 @@ def http_req(url, post=None, headers={}): # This while loop will download data from the server in multiple chunks, if necessary. while total_downloaded < total_to_download: - # Maximum chunk size 'limit_maximum' ... 400 return status if over maximum. So download maximum or whatever remains if less than maximum. - # As of 2018-03-06 I get return status 500 if over maximum - if total_to_download - total_downloaded > limit_maximum: - num_to_download = limit_maximum + # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. + if total_to_download - total_downloaded > 100: + num_to_download = 100 else: num_to_download = total_to_download - total_downloaded diff --git a/gcexport3.py b/gcexport3.py new file mode 100755 index 0000000..efde6c9 --- /dev/null +++ b/gcexport3.py @@ -0,0 +1,527 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- + +""" +File: gcexport.py +Original author: Kyle Krafka (https://github.com/kjkjava/) +Date: April 28, 2015 +Fork author: Michael P (https://github.com/moderation/) +Date: February 15, 2018 + +Description: Use this script to export your fitness data from Garmin Connect. + See README.md for more information. + +Activity & event types: + https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties + https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties +""" + +from datetime import datetime, timedelta +from getpass import getpass +from os import mkdir, remove, stat +from os.path import isdir, isfile +from subprocess import call +from sys import argv +from xml.dom.minidom import parseString + +import argparse +import http.cookiejar +import json +import urllib.error +import urllib.parse +import urllib.request +import zipfile + +SCRIPT_VERSION = '1.0.0' +CURRENT_DATE = datetime.now().strftime('%Y-%m-%d') +ACTIVITIES_DIRECTORY = './' + CURRENT_DATE + '_garmin_connect_export' + +PARSER = argparse.ArgumentParser() + +# TODO: Implement verbose and/or quiet options. 
+# PARSER.add_argument('-v', '--verbose', help="increase output verbosity", action="store_true") +PARSER.add_argument('--version', help="print version and exit", action="store_true") +PARSER.add_argument('--username', help="your Garmin Connect username (otherwise, you will be \ + prompted)", nargs='?') +PARSER.add_argument('--password', help="your Garmin Connect password (otherwise, you will be \ + prompted)", nargs='?') + +PARSER.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to \ + download, or 'all' (default: 1)") + +PARSER.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", + help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") + +PARSER.add_argument('-d', '--directory', nargs='?', default=ACTIVITIES_DIRECTORY, help="the \ + directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") + +PARSER.add_argument('-u', '--unzip', help="if downloading ZIP files (format: 'original'), unzip \ + the file and removes the ZIP file", action="store_true") + +ARGS = PARSER.parse_args() + +if ARGS.version: + print(argv[0] + ", version " + SCRIPT_VERSION) + exit(0) + +COOKIE_JAR = http.cookiejar.CookieJar() +OPENER = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(COOKIE_JAR)) +# print(COOKIE_JAR) + +def hhmmss_from_seconds(sec): + """Helper function that converts seconds to HH:MM:SS time format.""" + return str(timedelta(seconds=int(sec))).zfill(8) + +def kmh_from_mps(mps): + """Helper function that converts meters per second (mps) to km/h.""" + return str(mps * 3.6) + +def write_to_file(filename, content, mode): + """Helper function that persists content to file.""" + write_file = open(filename, mode) + write_file.write(content) + write_file.close() + +# url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. +def http_req(url, post=None, headers=None): + """Helper function that makes the HTTP requests.""" + request = urllib.request.Request(url) + # Tell Garmin we're some supported browser. + request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, \ + like Gecko) Chrome/54.0.2816.0 Safari/537.36') + if headers: + for header_key, header_value in headers.items(): + request.add_header(header_key, header_value) + if post: + # print('POSTING') + post = urllib.parse.urlencode(post) + post = post.encode('utf-8') # Convert dictionary to POST parameter string. + # print(request.headers) + # print(COOKIE_JAR) + # print(post) + # print(request) + response = OPENER.open(request, data=post) # This line may throw a urllib2.HTTPError. + + # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a + # urllib2.HTTPError which is checked for below. + # print(response.getcode()) + if response.getcode() != 200: + raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) + + return response.read() + +print('Welcome to Garmin Connect Exporter!') + +# Create directory for data files. +if isdir(ARGS.directory): + print('Warning: Output directory already exists. Will skip already-downloaded files and \ + append to the CSV file.') + +USERNAME = ARGS.username if ARGS.username else input('Username: ') +PASSWORD = ARGS.password if ARGS.password else getpass() + +# Maximum number of activities you can request at once. Set and enforced by Garmin. 
+LIMIT_MAXIMUM = 1000 + +WEBHOST = "https://connect.garmin.com" +REDIRECT = "https://connect.garmin.com/post-auth/login" +BASE_URL = "http://connect.garmin.com/en-US/signin" +GAUTH = "http://connect.garmin.com/gauth/hostname" +SSO = "https://sso.garmin.com/sso" +CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.2-min.css" + +DATA = { + 'service': REDIRECT, + 'webhost': WEBHOST, + 'source': BASE_URL, + 'redirectAfterAccountLoginUrl': REDIRECT, + 'redirectAfterAccountCreationUrl': REDIRECT, + 'gauthHost': SSO, + 'locale': 'en_US', + 'id': 'gauth-widget', + 'cssUrl': CSS, + 'clientId': 'GarminConnect', + 'rememberMeShown': 'true', + 'rememberMeChecked': 'false', + 'createAccountShown': 'true', + 'openCreateAccount': 'false', + 'usernameShown': 'false', + 'displayNameShown': 'false', + 'consumeServiceTicket': 'false', + 'initialFocus': 'true', + 'embedWidget': 'false', + 'generateExtraServiceTicket': 'false' + } + +print(urllib.parse.urlencode(DATA)) + +# URLs for various services. +URL_GC_LOGIN = 'https://sso.garmin.com/sso/login?' + urllib.parse.urlencode(DATA) +URL_GC_POST_AUTH = 'https://connect.garmin.com/modern/activities?' +URL_GC_SEARCH = 'https://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' +URL_GC_LIST = \ + 'https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' +URL_GC_ACTIVITY = 'https://connect.garmin.com/modern/proxy/activity-service/activity/' +URL_GC_ACTIVITY_DETAIL = \ + 'https://connect.garmin.com/modern/proxy/activity-service-1.3/json/activityDetails/' +URL_GC_GPX_ACTIVITY = \ + 'https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/' +URL_GC_TCX_ACTIVITY = \ + 'https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/' +URL_GC_ORIGINAL_ACTIVITY = 'http://connect.garmin.com/proxy/download-service/files/activity/' + +# Initially, we need to get a valid session cookie, so we pull the login page. +print('Request login page') +http_req(URL_GC_LOGIN) +print('Finish login page') + +# Now we'll actually login. +# Fields that are passed in a typical Garmin login. +POST_DATA = { + 'username': USERNAME, + 'password': PASSWORD, + 'embed': 'true', + 'lt': 'e1s1', + '_eventId': 'submit', + 'displayNameRequired': 'false' + } + +print('Post login data') +http_req(URL_GC_LOGIN, POST_DATA) +print('Finish login post') + +# Get the key. +# TODO: Can we do this without iterating? +LOGIN_TICKET = None +print("-------COOKIE") +for cookie in COOKIE_JAR: + if cookie.name == 'CASTGC': + print(cookie.name + ": " + cookie.value) + LOGIN_TICKET = cookie.value + break +print("-------COOKIE") + +if not LOGIN_TICKET: + raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct \ + username and password?') + +# Chop of 'TGT-' off the beginning, prepend 'ST-0'. +LOGIN_TICKET = 'ST-0' + LOGIN_TICKET[4:] +# print(LOGIN_TICKET) + +print('Request authentication') +# print(URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) +print("Request authentication URL: " + URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) +http_req(URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) +print('Finished authentication') + +# We should be logged in now. 
+if not isdir(ARGS.directory): + mkdir(ARGS.directory) + +CSV_FILENAME = ARGS.directory + '/activities.csv' +CSV_EXISTED = isfile(CSV_FILENAME) + +CSV_FILE = open(CSV_FILENAME, 'a') + +# Write header to CSV file +if not CSV_EXISTED: + CSV_FILE.write('Activity name,\ +Description,\ +Begin timestamp,\ +Duration (h:m:s),\ +Moving duration (h:m:s),\ +Distance (km),\ +Average speed (km/h),\ +Average moving speed (km/h),\ +Max. speed (km/h),\ +Elevation loss uncorrected (m),\ +Elevation gain uncorrected (m),\ +Elevation min. uncorrected (m),\ +Elevation max. uncorrected (m),\ +Min. heart rate (bpm),\ +Max. heart rate (bpm),\ +Average heart rate (bpm),\ +Calories,\ +Avg. cadence (rpm),\ +Max. cadence (rpm),\ +Strokes,\ +Avg. temp (°C),\ +Min. temp (°C),\ +Max. temp (°C),\ +Map,\ +End timestamp,\ +Begin timestamp (ms),\ +End timestamp (ms),\ +Device,\ +Activity type,\ +Event type,\ +Time zone,\ +Begin latitude (°DD),\ +Begin longitude (°DD),\ +End latitude (°DD),\ +End longitude (°DD),\ +Elevation gain corrected (m),\ +Elevation loss corrected (m),\ +Elevation max. corrected (m),\ +Elevation min. corrected (m),\ +Sample count\n') + +DOWNLOAD_ALL = False +if ARGS.count == 'all': + # If the user wants to download all activities, first download one, + # then the result of that request will tell us how many are available + # so we will modify the variables then. + TOTAL_TO_DOWNLOAD = 1 + DOWNLOAD_ALL = True +else: + TOTAL_TO_DOWNLOAD = int(ARGS.count) +TOTAL_DOWNLOADED = 0 + +# This while loop will download data from the server in multiple chunks, if necessary. +while TOTAL_DOWNLOADED < TOTAL_TO_DOWNLOAD: + # Maximum chunk size 'limit_maximum' ... 400 return status if over maximum. So download + # maximum or whatever remains if less than maximum. + # As of 2018-03-06 I get return status 500 if over maximum + if TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED > LIMIT_MAXIMUM: + NUM_TO_DOWNLOAD = LIMIT_MAXIMUM + else: + NUM_TO_DOWNLOAD = TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED + + SEARCH_PARAMS = {'start': TOTAL_DOWNLOADED, 'limit': NUM_TO_DOWNLOAD} + # Query Garmin Connect + print("Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print(URL_GC_SEARCH + urllib.parse.urlencode(SEARCH_PARAMS)) + RESULT = http_req(URL_GC_SEARCH + urllib.parse.urlencode(SEARCH_PARAMS)) + print("Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + # Persist JSON + write_to_file(ARGS.directory + '/activities.json', RESULT.decode(), 'a') + + JSON_RESULTS = json.loads(RESULT) # TODO: Catch possible exceptions here. + + if DOWNLOAD_ALL: + # Modify TOTAL_TO_DOWNLOAD based on how many activities the server reports. + TOTAL_TO_DOWNLOAD = int(JSON_RESULTS['results']['totalFound']) + + # Do it only once. + DOWNLOAD_ALL = False + + # Pull out just the list of activities. + ACTIVITIES = JSON_RESULTS['results']['activities'] + # print(ACTIVITIES) + + print("Activity list URL: " + URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) + ACTIVITY_LIST = http_req(URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) + write_to_file(ARGS.directory + '/activity_list.json', ACTIVITY_LIST.decode(), 'a') + # LIST = json.loads(ACTIVITY_LIST) + # print(LIST) + + # Process each activity. + for a in ACTIVITIES: + # Display which entry we're working on. 
+ print('Garmin Connect activity: [' + str(a['activity']['activityId']) + ']', end=' ') + print(a['activity']['activityName']) + print('\t' + a['activity']['uploadDate']['display'] + ',', end=' ') + if ARGS.format == 'gpx': + data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ + '_activity.gpx' + download_url = URL_GC_GPX_ACTIVITY + str(a['activity']['activityId']) + '?full=true' + print(download_url) + file_mode = 'w' + elif ARGS.format == 'tcx': + data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ + '_activity.tcx' + download_url = URL_GC_TCX_ACTIVITY + str(a['activity']['activityId']) + '?full=true' + file_mode = 'w' + elif ARGS.format == 'original': + data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ + '_activity.zip' + fit_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + '_activity.fit' + download_url = URL_GC_ORIGINAL_ACTIVITY + str(a['activity']['activityId']) + file_mode = 'wb' + else: + raise Exception('Unrecognized format.') + + if isfile(data_filename): + print('\tData file already exists; skipping...') + continue + # Regardless of unzip setting, don't redownload if the ZIP or FIT file exists. + if ARGS.format == 'original' and isfile(fit_filename): + print('\tFIT data file already exists; skipping...') + continue + + # Download the data file from Garmin Connect. If the download fails (e.g., due to timeout), + # this script will die, but nothing will have been written to disk about this activity, so + # just running it again should pick up where it left off. + print('\tDownloading file...', end=' ') + + try: + data = http_req(download_url) + except urllib.error.HTTPError as errs: + # Handle expected (though unfortunate) error codes; die on unexpected ones. + if errs.code == 500 and ARGS.format == 'tcx': + # Garmin will give an internal server error (HTTP 500) when downloading TCX files + # if the original was a manual GPX upload. Writing an empty file prevents this file + # from being redownloaded, similar to the way GPX files are saved even when there + # are no tracks. One could be generated here, but that's a bit much. Use the GPX + # format if you want actual data in every file, as I believe Garmin provides a GPX + # file for every activity. + print('Writing empty file since Garmin did not generate a TCX file for this \ + activity...', end=' ') + data = '' + elif errs.code == 404 and ARGS.format == 'original': + # For manual activities (i.e., entered in online without a file upload), there is + # no original file. # Write an empty file to prevent redownloading it. + print('Writing empty file since there was no original activity data...', end=' ') + data = '' + else: + raise Exception('Failed. 
Got an unexpected HTTP error (' + str(errs.code) + \ + download_url +').') + + # Persist file + write_to_file(data_filename, data.decode(), file_mode) + + print("Activity summary URL: " + URL_GC_ACTIVITY + str(a['activity']['activityId'])) + ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a['activity']['activityId'])) + write_to_file(ARGS.directory + '/' + str(a['activity']['activityId']) + \ + '_activity_summary.json', ACTIVITY_SUMMARY.decode(), 'a') + JSON_SUMMARY = json.loads(ACTIVITY_SUMMARY) + # print(JSON_SUMMARY) + + print("Activity details URL: " + URL_GC_ACTIVITY_DETAIL + str(a['activity']['activityId'])) + ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a['activity']['activityId'])) + write_to_file(ARGS.directory + '/' + str(a['activity']['activityId']) + \ + '_activity_detail.json', ACTIVITY_DETAIL.decode(), 'a') + JSON_DETAIL = json.loads(ACTIVITY_DETAIL) + # print(JSON_DETAIL) + + # Write stats to CSV. + empty_record = '"",' + csv_record = '' + + csv_record += empty_record if 'activityName' not in a['activity'] else '"' + \ + a['activity']['activityName'].replace('"', '""') + '",' + csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + \ + a['activity']['activityDescription'].replace('"', '""') + '",' + csv_record += empty_record if 'startTimeLocal' not in JSON_SUMMARY['summaryDTO'] \ + else '"' + JSON_SUMMARY['summaryDTO']['startTimeLocal'] + '",' + csv_record += empty_record if 'elapsedDuration' not in JSON_SUMMARY['summaryDTO'] \ + else hhmmss_from_seconds(JSON_SUMMARY['summaryDTO']['elapsedDuration']) + ',' + csv_record += empty_record if 'movingDuration' not in JSON_SUMMARY['summaryDTO'] \ + else hhmmss_from_seconds(JSON_SUMMARY['summaryDTO']['movingDuration']) + ',' + csv_record += empty_record if 'distance' not in JSON_SUMMARY['summaryDTO'] \ + else "{0:.5f}".format(JSON_SUMMARY['summaryDTO']['distance']/1000) + ',' + csv_record += empty_record if 'averageSpeed' not in JSON_SUMMARY['summaryDTO'] \ + else kmh_from_mps(JSON_SUMMARY['summaryDTO']['averageSpeed']) + ',' + csv_record += empty_record if 'averageMovingSpeed' not in JSON_SUMMARY['summaryDTO'] \ + else kmh_from_mps(JSON_SUMMARY['summaryDTO']['averageMovingSpeed']) + ',' + csv_record += empty_record if 'maxSpeed' not in JSON_SUMMARY['summaryDTO'] \ + else kmh_from_mps(JSON_SUMMARY['summaryDTO']['maxSpeed']) + ',' + csv_record += empty_record if 'elevationLoss' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['elevationLoss']) + ',' + csv_record += empty_record if 'elevationGain' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['elevationGain']) + ',' + csv_record += empty_record if 'minElevation' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['minElevation']) + ',' + csv_record += empty_record if 'maxElevation' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['maxElevation']) + ',' + csv_record += empty_record if 'minHR' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'maxHR' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['maxHR']) + ',' + csv_record += empty_record if 'averageHR' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['averageHR']) + ',' + csv_record += empty_record if 'calories' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['calories']) + ',' + csv_record += empty_record if 'averageBikeCadence' not in 
JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['averageBikeCadence']) + ',' + csv_record += empty_record if 'maxBikeCadence' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['maxBikeCadence']) + ',' + csv_record += empty_record if 'totalNumberOfStrokes' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['totalNumberOfStrokes']) + ',' + csv_record += empty_record if 'averageTemperature' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['averageTemperature']) + ',' + csv_record += empty_record if 'minTemperature' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['minTemperature']) + ',' + csv_record += empty_record if 'maxTemperature' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['maxTemperature']) + ',' + csv_record += empty_record if 'activityId' not in a['activity'] else \ + '"https://connect.garmin.com/modern/activity/' + str(a['activity']['activityId']) + '",' + csv_record += empty_record if 'endTimestamp' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'beginTimestamp' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'endTimestamp' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'device' not in a['activity'] else \ + a['activity']['device']['display'] + ' ' + a['activity']['device']['version'] + ',' + csv_record += empty_record if 'activityType' not in a['activity'] else \ + a['activity']['activityType']['display'] + ',' + csv_record += empty_record if 'eventType' not in a['activity'] else \ + a['activity']['eventType']['display'] + ',' + csv_record += empty_record if 'activityTimeZone' not in a['activity'] else \ + a['activity']['activityTimeZone']['display'] + ',' + csv_record += empty_record if 'startLatitude' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['startLatitude']) + ',' + csv_record += empty_record if 'startLongitude' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['startLongitude']) + ',' + csv_record += empty_record if 'endLatitude' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['endLatitude']) + ',' + csv_record += empty_record if 'endLongitude' not in JSON_SUMMARY['summaryDTO'] \ + else str(JSON_SUMMARY['summaryDTO']['endLongitude']) + ',' + csv_record += empty_record if 'gainCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'lossCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'maxCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'minCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ + else ',' # no longer available in JSON + csv_record += empty_record if 'metricsCount' not in \ + JSON_DETAIL['com.garmin.activity.details.json.ActivityDetails'] else \ + str(JSON_DETAIL['com.garmin.activity.details.json.ActivityDetails']['metricsCount']) \ + + ',' + csv_record += '\n' + + CSV_FILE.write(csv_record) + + if ARGS.format == 'gpx': + # Validate GPX data. If we have an activity without GPS data (e.g., running on a + # treadmill), Garmin Connect still kicks out a GPX, but there is only activity + # information, no GPS data. N.B. 
You can omit the XML parse (and the associated log + # messages) to speed things up. + gpx = parseString(data) + if gpx.getElementsByTagName('trkpt'): + print('Done. GPX data saved.') + else: + print('Done. No track points found.') + elif ARGS.format == 'original': + # Even manual upload of a GPX file is zipped, but we'll validate the extension. + if ARGS.unzip and data_filename[-3:].lower() == 'zip': + print("Unzipping and removing original files...", end=' ') + print('Filesize is: ' + str(stat(data_filename).st_size)) + if stat(data_filename).st_size > 0: + zip_file = open(data_filename, 'rb') + z = zipfile.ZipFile(zip_file) + for name in z.namelist(): + z.extract(name, ARGS.directory) + zip_file.close() + else: + print('Skipping 0Kb zip file.') + remove(data_filename) + print('Done.') + else: + # TODO: Consider validating other formats. + print('Done.') + TOTAL_DOWNLOADED += NUM_TO_DOWNLOAD +# End while loop for multiple chunks. + +CSV_FILE.close() + +print('Open CSV output.') +print(CSV_FILENAME) +# open CSV file. Comment this line out if you don't want this behavior +call(["/usr/bin/libreoffice6.0", "--calc", CSV_FILENAME]) + +print('Done!') From bb1bf2d4177947ed6a61182a647923c873009c23 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Tue, 17 Apr 2018 07:53:06 -0700 Subject: [PATCH 13/33] Bump version on account of Python 3 refactor and small cosmetic changes. Signed-off-by: Michael Payne --- gcexport3.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index efde6c9..255a67f 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -32,7 +32,7 @@ import urllib.request import zipfile -SCRIPT_VERSION = '1.0.0' +SCRIPT_VERSION = '2.0.0' CURRENT_DATE = datetime.now().strftime('%Y-%m-%d') ACTIVITIES_DIRECTORY = './' + CURRENT_DATE + '_garmin_connect_export' @@ -45,16 +45,12 @@ prompted)", nargs='?') PARSER.add_argument('--password', help="your Garmin Connect password (otherwise, you will be \ prompted)", nargs='?') - PARSER.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to \ download, or 'all' (default: 1)") - PARSER.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", - help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") - + help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") PARSER.add_argument('-d', '--directory', nargs='?', default=ACTIVITIES_DIRECTORY, help="the \ directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") - PARSER.add_argument('-u', '--unzip', help="if downloading ZIP files (format: 'original'), unzip \ the file and removes the ZIP file", action="store_true") @@ -93,7 +89,6 @@ def http_req(url, post=None, headers=None): for header_key, header_value in headers.items(): request.add_header(header_key, header_value) if post: - # print('POSTING') post = urllib.parse.urlencode(post) post = post.encode('utf-8') # Convert dictionary to POST parameter string. # print(request.headers) From c3bdff5faeb2de9c73a0891e27a2636c7611e818 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Thu, 19 Apr 2018 19:06:53 -0700 Subject: [PATCH 14/33] Formatting, pylint changes and resolve https://github.com/moderation/garmin-connect-export/issues/13 issue. 
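One of the changes below guards the duration formatter so that a missing or non-numeric duration in the JSON no longer blows up the CSV export. For reference, the formatting itself is just a timedelta padded to eight characters; a made-up example:

    >>> from datetime import timedelta
    >>> str(timedelta(seconds=int(3661.0))).zfill(8)
    '01:01:01'

Anything that is not a float now falls back to the '0.000' placeholder instead of potentially raising a TypeError.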
Signed-off-by: Michael Payne --- gcexport3.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 255a67f..dee4bef 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -47,8 +47,8 @@ prompted)", nargs='?') PARSER.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to \ download, or 'all' (default: 1)") -PARSER.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", - help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") +PARSER.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], \ + default="gpx", help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") PARSER.add_argument('-d', '--directory', nargs='?', default=ACTIVITIES_DIRECTORY, help="the \ directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") PARSER.add_argument('-u', '--unzip', help="if downloading ZIP files (format: 'original'), unzip \ @@ -66,7 +66,11 @@ def hhmmss_from_seconds(sec): """Helper function that converts seconds to HH:MM:SS time format.""" - return str(timedelta(seconds=int(sec))).zfill(8) + if isinstance(sec, (float)): + formatted_time = str(timedelta(seconds=int(sec))).zfill(8) + else: + formatted_time = "0.000" + return formatted_time def kmh_from_mps(mps): """Helper function that converts meters per second (mps) to km/h.""" @@ -121,7 +125,6 @@ def http_req(url, post=None, headers=None): WEBHOST = "https://connect.garmin.com" REDIRECT = "https://connect.garmin.com/post-auth/login" BASE_URL = "http://connect.garmin.com/en-US/signin" -GAUTH = "http://connect.garmin.com/gauth/hostname" SSO = "https://sso.garmin.com/sso" CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.2-min.css" @@ -392,7 +395,7 @@ def http_req(url, post=None, headers=None): # print(JSON_DETAIL) # Write stats to CSV. - empty_record = '"",' + empty_record = ',' csv_record = '' csv_record += empty_record if 'activityName' not in a['activity'] else '"' + \ From 9a7c78e158467ac674499325915e6c2844742b63 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sat, 21 Apr 2018 08:53:54 -0700 Subject: [PATCH 15/33] Implement @pe-st method for getting session ticket replacing loop on the cookie jar. Signed-off-by: Michael Payne --- gcexport3.py | 33 +++++++++++---------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index dee4bef..839e9b8 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -27,6 +27,7 @@ import argparse import http.cookiejar import json +import re import urllib.error import urllib.parse import urllib.request @@ -185,30 +186,18 @@ def http_req(url, post=None, headers=None): } print('Post login data') -http_req(URL_GC_LOGIN, POST_DATA) +LOGIN_RESPONSE = http_req(URL_GC_LOGIN, POST_DATA).decode() print('Finish login post') -# Get the key. -# TODO: Can we do this without iterating? -LOGIN_TICKET = None -print("-------COOKIE") -for cookie in COOKIE_JAR: - if cookie.name == 'CASTGC': - print(cookie.name + ": " + cookie.value) - LOGIN_TICKET = cookie.value - break -print("-------COOKIE") - -if not LOGIN_TICKET: - raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct \ - username and password?') - -# Chop of 'TGT-' off the beginning, prepend 'ST-0'. 
-LOGIN_TICKET = 'ST-0' + LOGIN_TICKET[4:] -# print(LOGIN_TICKET) - -print('Request authentication') -# print(URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) +# extract the ticket from the login response +PATTERN = re.compile(r".*\?ticket=([-\w]+)\";.*", re.MULTILINE|re.DOTALL) +MATCH = PATTERN.match(LOGIN_RESPONSE) +if not MATCH: + raise Exception('Did not get a ticket in the login response. Cannot log in. Did \ + you enter the correct username and password?') +LOGIN_TICKET = MATCH.group(1) +print('login ticket=' + LOGIN_TICKET) + print("Request authentication URL: " + URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) http_req(URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) print('Finished authentication') From 5be078ab06f6d75896af31a92cf565d17d0905a1 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sat, 21 Apr 2018 09:25:24 -0700 Subject: [PATCH 16/33] Remove urllib2 warnings as it is not used. Fix debug print statements. Fix print formatting. Signed-off-by: Michael Payne --- gcexport3.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 839e9b8..ba65ea8 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -96,17 +96,13 @@ def http_req(url, post=None, headers=None): if post: post = urllib.parse.urlencode(post) post = post.encode('utf-8') # Convert dictionary to POST parameter string. - # print(request.headers) - # print(COOKIE_JAR) - # print(post) - # print(request) - response = OPENER.open(request, data=post) # This line may throw a urllib2.HTTPError. - - # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a - # urllib2.HTTPError which is checked for below. - # print(response.getcode()) + # print("request.headers: " + str(request.headers) + " COOKIE_JAR: " + str(COOKIE_JAR)) + # print("post: " + str(post) + "request: " + str(request)) + response = OPENER.open((request), data=post) + if response.getcode() != 200: raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) + # print(response.getcode()) return response.read() @@ -115,7 +111,7 @@ def http_req(url, post=None, headers=None): # Create directory for data files. if isdir(ARGS.directory): print('Warning: Output directory already exists. Will skip already-downloaded files and \ - append to the CSV file.') +append to the CSV file.') USERNAME = ARGS.username if ARGS.username else input('Username: ') PASSWORD = ARGS.password if ARGS.password else getpass() @@ -194,7 +190,7 @@ def http_req(url, post=None, headers=None): MATCH = PATTERN.match(LOGIN_RESPONSE) if not MATCH: raise Exception('Did not get a ticket in the login response. Cannot log in. Did \ - you enter the correct username and password?') +you enter the correct username and password?') LOGIN_TICKET = MATCH.group(1) print('login ticket=' + LOGIN_TICKET) @@ -355,7 +351,7 @@ def http_req(url, post=None, headers=None): # format if you want actual data in every file, as I believe Garmin provides a GPX # file for every activity. print('Writing empty file since Garmin did not generate a TCX file for this \ - activity...', end=' ') +activity...', end=' ') data = '' elif errs.code == 404 and ARGS.format == 'original': # For manual activities (i.e., entered in online without a file upload), there is From 74f0e341229d668f05107974e2bbd5fdcc9df0d0 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Mon, 7 May 2018 08:58:46 -0700 Subject: [PATCH 17/33] Fix regression in move to Python 3. Re-implement https://github.com/moderation/garmin-connect-export/pull/8. 
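This restores the handling for activities without GPS coordinates: the GPX endpoint answers those with HTTP 204 (no content), so an empty file is written and the XML validation is only attempted when there is actually data. For reference, a quick way to check whether a downloaded GPX contains track points, assuming `data` holds a non-empty GPX document:

    from xml.dom.minidom import parseString

    gpx = parseString(data)
    has_track_points = len(gpx.getElementsByTagName('trkpt')) > 0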
Signed-off-by: Michael Payne --- gcexport3.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index ba65ea8..7e9d26b 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -100,7 +100,12 @@ def http_req(url, post=None, headers=None): # print("post: " + str(post) + "request: " + str(request)) response = OPENER.open((request), data=post) - if response.getcode() != 200: + if response.getcode() == 204: + # For activities without GPS coordinates, there is no GPX download (204 = no content). + # Write an empty file to prevent redownloading it. + print('Writing empty file since there was no GPX activity data...') + return '' + elif response.getcode() != 200: raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) # print(response.getcode()) @@ -469,11 +474,11 @@ def http_req(url, post=None, headers=None): CSV_FILE.write(csv_record) - if ARGS.format == 'gpx': + if ARGS.format == 'gpx' and data: # Validate GPX data. If we have an activity without GPS data (e.g., running on a - # treadmill), Garmin Connect still kicks out a GPX, but there is only activity - # information, no GPS data. N.B. You can omit the XML parse (and the associated log - # messages) to speed things up. + # treadmill), Garmin Connect still kicks out a GPX (sometimes), but there is only + # activity information, no GPS data. N.B. You can omit the XML parse (and the + # associated log messages) to speed things up. gpx = parseString(data) if gpx.getElementsByTagName('trkpt'): print('Done. GPX data saved.') From d8e66376f825bc3c4b2ccff38ed772b53002fe75 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Wed, 15 Aug 2018 07:28:22 -0700 Subject: [PATCH 18/33] Linting by black. Upgrade to Libre Office 6.1. Signed-off-by: Michael Payne --- gcexport3.py | 691 ++++++++++++++++++++++++++++++++++----------------- 1 file changed, 460 insertions(+), 231 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 7e9d26b..213b7ea 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -33,27 +33,58 @@ import urllib.request import zipfile -SCRIPT_VERSION = '2.0.0' -CURRENT_DATE = datetime.now().strftime('%Y-%m-%d') -ACTIVITIES_DIRECTORY = './' + CURRENT_DATE + '_garmin_connect_export' +SCRIPT_VERSION = "2.0.0" +CURRENT_DATE = datetime.now().strftime("%Y-%m-%d") +ACTIVITIES_DIRECTORY = "./" + CURRENT_DATE + "_garmin_connect_export" PARSER = argparse.ArgumentParser() # TODO: Implement verbose and/or quiet options. 
# PARSER.add_argument('-v', '--verbose', help="increase output verbosity", action="store_true") -PARSER.add_argument('--version', help="print version and exit", action="store_true") -PARSER.add_argument('--username', help="your Garmin Connect username (otherwise, you will be \ - prompted)", nargs='?') -PARSER.add_argument('--password', help="your Garmin Connect password (otherwise, you will be \ - prompted)", nargs='?') -PARSER.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to \ - download, or 'all' (default: 1)") -PARSER.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], \ - default="gpx", help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") -PARSER.add_argument('-d', '--directory', nargs='?', default=ACTIVITIES_DIRECTORY, help="the \ - directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") -PARSER.add_argument('-u', '--unzip', help="if downloading ZIP files (format: 'original'), unzip \ - the file and removes the ZIP file", action="store_true") +PARSER.add_argument("--version", help="print version and exit", action="store_true") +PARSER.add_argument( + "--username", + help="your Garmin Connect username (otherwise, you will be \ + prompted)", + nargs="?", +) +PARSER.add_argument( + "--password", + help="your Garmin Connect password (otherwise, you will be \ + prompted)", + nargs="?", +) +PARSER.add_argument( + "-c", + "--count", + nargs="?", + default="1", + help="number of recent activities to \ + download, or 'all' (default: 1)", +) +PARSER.add_argument( + "-f", + "--format", + nargs="?", + choices=["gpx", "tcx", "original"], + default="gpx", + help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')", +) +PARSER.add_argument( + "-d", + "--directory", + nargs="?", + default=ACTIVITIES_DIRECTORY, + help="the \ + directory to export to (default: './YYYY-MM-DD_garmin_connect_export')", +) +PARSER.add_argument( + "-u", + "--unzip", + help="if downloading ZIP files (format: 'original'), unzip \ + the file and removes the ZIP file", + action="store_true", +) ARGS = PARSER.parse_args() @@ -65,6 +96,7 @@ OPENER = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(COOKIE_JAR)) # print(COOKIE_JAR) + def hhmmss_from_seconds(sec): """Helper function that converts seconds to HH:MM:SS time format.""" if isinstance(sec, (float)): @@ -73,29 +105,35 @@ def hhmmss_from_seconds(sec): formatted_time = "0.000" return formatted_time + def kmh_from_mps(mps): """Helper function that converts meters per second (mps) to km/h.""" return str(mps * 3.6) + def write_to_file(filename, content, mode): """Helper function that persists content to file.""" write_file = open(filename, mode) write_file.write(content) write_file.close() + # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. def http_req(url, post=None, headers=None): """Helper function that makes the HTTP requests.""" request = urllib.request.Request(url) # Tell Garmin we're some supported browser. 
- request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, \ - like Gecko) Chrome/54.0.2816.0 Safari/537.36') + request.add_header( + "User-Agent", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, \ + like Gecko) Chrome/54.0.2816.0 Safari/537.36", + ) if headers: for header_key, header_value in headers.items(): request.add_header(header_key, header_value) if post: post = urllib.parse.urlencode(post) - post = post.encode('utf-8') # Convert dictionary to POST parameter string. + post = post.encode("utf-8") # Convert dictionary to POST parameter string. # print("request.headers: " + str(request.headers) + " COOKIE_JAR: " + str(COOKIE_JAR)) # print("post: " + str(post) + "request: " + str(request)) response = OPENER.open((request), data=post) @@ -103,22 +141,25 @@ def http_req(url, post=None, headers=None): if response.getcode() == 204: # For activities without GPS coordinates, there is no GPX download (204 = no content). # Write an empty file to prevent redownloading it. - print('Writing empty file since there was no GPX activity data...') - return '' + print("Writing empty file since there was no GPX activity data...") + return "" elif response.getcode() != 200: - raise Exception('Bad return code (' + str(response.getcode()) + ') for: ' + url) + raise Exception("Bad return code (" + str(response.getcode()) + ") for: " + url) # print(response.getcode()) return response.read() -print('Welcome to Garmin Connect Exporter!') + +print("Welcome to Garmin Connect Exporter!") # Create directory for data files. if isdir(ARGS.directory): - print('Warning: Output directory already exists. Will skip already-downloaded files and \ -append to the CSV file.') + print( + "Warning: Output directory already exists. Will skip already-downloaded files and \ +append to the CSV file." + ) -USERNAME = ARGS.username if ARGS.username else input('Username: ') +USERNAME = ARGS.username if ARGS.username else input("Username: ") PASSWORD = ARGS.password if ARGS.password else getpass() # Maximum number of activities you can request at once. Set and enforced by Garmin. @@ -131,90 +172,99 @@ def http_req(url, post=None, headers=None): CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.2-min.css" DATA = { - 'service': REDIRECT, - 'webhost': WEBHOST, - 'source': BASE_URL, - 'redirectAfterAccountLoginUrl': REDIRECT, - 'redirectAfterAccountCreationUrl': REDIRECT, - 'gauthHost': SSO, - 'locale': 'en_US', - 'id': 'gauth-widget', - 'cssUrl': CSS, - 'clientId': 'GarminConnect', - 'rememberMeShown': 'true', - 'rememberMeChecked': 'false', - 'createAccountShown': 'true', - 'openCreateAccount': 'false', - 'usernameShown': 'false', - 'displayNameShown': 'false', - 'consumeServiceTicket': 'false', - 'initialFocus': 'true', - 'embedWidget': 'false', - 'generateExtraServiceTicket': 'false' - } + "service": REDIRECT, + "webhost": WEBHOST, + "source": BASE_URL, + "redirectAfterAccountLoginUrl": REDIRECT, + "redirectAfterAccountCreationUrl": REDIRECT, + "gauthHost": SSO, + "locale": "en_US", + "id": "gauth-widget", + "cssUrl": CSS, + "clientId": "GarminConnect", + "rememberMeShown": "true", + "rememberMeChecked": "false", + "createAccountShown": "true", + "openCreateAccount": "false", + "usernameShown": "false", + "displayNameShown": "false", + "consumeServiceTicket": "false", + "initialFocus": "true", + "embedWidget": "false", + "generateExtraServiceTicket": "false", +} print(urllib.parse.urlencode(DATA)) # URLs for various services. 
-URL_GC_LOGIN = 'https://sso.garmin.com/sso/login?' + urllib.parse.urlencode(DATA) -URL_GC_POST_AUTH = 'https://connect.garmin.com/modern/activities?' -URL_GC_SEARCH = 'https://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' -URL_GC_LIST = \ - 'https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' -URL_GC_ACTIVITY = 'https://connect.garmin.com/modern/proxy/activity-service/activity/' -URL_GC_ACTIVITY_DETAIL = \ - 'https://connect.garmin.com/modern/proxy/activity-service-1.3/json/activityDetails/' -URL_GC_GPX_ACTIVITY = \ - 'https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/' -URL_GC_TCX_ACTIVITY = \ - 'https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/' -URL_GC_ORIGINAL_ACTIVITY = 'http://connect.garmin.com/proxy/download-service/files/activity/' +URL_GC_LOGIN = "https://sso.garmin.com/sso/login?" + urllib.parse.urlencode(DATA) +URL_GC_POST_AUTH = "https://connect.garmin.com/modern/activities?" +URL_GC_SEARCH = ( + "https://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?" +) +URL_GC_LIST = "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?" +URL_GC_ACTIVITY = "https://connect.garmin.com/modern/proxy/activity-service/activity/" +URL_GC_ACTIVITY_DETAIL = ( + "https://connect.garmin.com/modern/proxy/activity-service-1.3/json/activityDetails/" +) +URL_GC_GPX_ACTIVITY = ( + "https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/" +) +URL_GC_TCX_ACTIVITY = ( + "https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/" +) +URL_GC_ORIGINAL_ACTIVITY = ( + "http://connect.garmin.com/proxy/download-service/files/activity/" +) # Initially, we need to get a valid session cookie, so we pull the login page. -print('Request login page') +print("Request login page") http_req(URL_GC_LOGIN) -print('Finish login page') +print("Finish login page") # Now we'll actually login. # Fields that are passed in a typical Garmin login. POST_DATA = { - 'username': USERNAME, - 'password': PASSWORD, - 'embed': 'true', - 'lt': 'e1s1', - '_eventId': 'submit', - 'displayNameRequired': 'false' - } - -print('Post login data') + "username": USERNAME, + "password": PASSWORD, + "embed": "true", + "lt": "e1s1", + "_eventId": "submit", + "displayNameRequired": "false", +} + +print("Post login data") LOGIN_RESPONSE = http_req(URL_GC_LOGIN, POST_DATA).decode() -print('Finish login post') +print("Finish login post") # extract the ticket from the login response -PATTERN = re.compile(r".*\?ticket=([-\w]+)\";.*", re.MULTILINE|re.DOTALL) +PATTERN = re.compile(r".*\?ticket=([-\w]+)\";.*", re.MULTILINE | re.DOTALL) MATCH = PATTERN.match(LOGIN_RESPONSE) if not MATCH: - raise Exception('Did not get a ticket in the login response. Cannot log in. Did \ -you enter the correct username and password?') + raise Exception( + "Did not get a ticket in the login response. Cannot log in. Did \ +you enter the correct username and password?" + ) LOGIN_TICKET = MATCH.group(1) -print('login ticket=' + LOGIN_TICKET) +print("login ticket=" + LOGIN_TICKET) -print("Request authentication URL: " + URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) -http_req(URL_GC_POST_AUTH + 'ticket=' + LOGIN_TICKET) -print('Finished authentication') +print("Request authentication URL: " + URL_GC_POST_AUTH + "ticket=" + LOGIN_TICKET) +http_req(URL_GC_POST_AUTH + "ticket=" + LOGIN_TICKET) +print("Finished authentication") # We should be logged in now. 
if not isdir(ARGS.directory): mkdir(ARGS.directory) -CSV_FILENAME = ARGS.directory + '/activities.csv' +CSV_FILENAME = ARGS.directory + "/activities.csv" CSV_EXISTED = isfile(CSV_FILENAME) -CSV_FILE = open(CSV_FILENAME, 'a') +CSV_FILE = open(CSV_FILENAME, "a") # Write header to CSV file if not CSV_EXISTED: - CSV_FILE.write('Activity name,\ + CSV_FILE.write( + "Activity name,\ Description,\ Begin timestamp,\ Duration (h:m:s),\ @@ -253,10 +303,11 @@ def http_req(url, post=None, headers=None): Elevation loss corrected (m),\ Elevation max. corrected (m),\ Elevation min. corrected (m),\ -Sample count\n') +Sample count\n" + ) DOWNLOAD_ALL = False -if ARGS.count == 'all': +if ARGS.count == "all": # If the user wants to download all activities, first download one, # then the result of that request will tell us how many are available # so we will modify the variables then. @@ -276,7 +327,7 @@ def http_req(url, post=None, headers=None): else: NUM_TO_DOWNLOAD = TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED - SEARCH_PARAMS = {'start': TOTAL_DOWNLOADED, 'limit': NUM_TO_DOWNLOAD} + SEARCH_PARAMS = {"start": TOTAL_DOWNLOADED, "limit": NUM_TO_DOWNLOAD} # Query Garmin Connect print("Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") print(URL_GC_SEARCH + urllib.parse.urlencode(SEARCH_PARAMS)) @@ -284,232 +335,410 @@ def http_req(url, post=None, headers=None): print("Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") # Persist JSON - write_to_file(ARGS.directory + '/activities.json', RESULT.decode(), 'a') + write_to_file(ARGS.directory + "/activities.json", RESULT.decode(), "a") JSON_RESULTS = json.loads(RESULT) # TODO: Catch possible exceptions here. if DOWNLOAD_ALL: # Modify TOTAL_TO_DOWNLOAD based on how many activities the server reports. - TOTAL_TO_DOWNLOAD = int(JSON_RESULTS['results']['totalFound']) + TOTAL_TO_DOWNLOAD = int(JSON_RESULTS["results"]["totalFound"]) # Do it only once. DOWNLOAD_ALL = False # Pull out just the list of activities. - ACTIVITIES = JSON_RESULTS['results']['activities'] + ACTIVITIES = JSON_RESULTS["results"]["activities"] # print(ACTIVITIES) print("Activity list URL: " + URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) ACTIVITY_LIST = http_req(URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) - write_to_file(ARGS.directory + '/activity_list.json', ACTIVITY_LIST.decode(), 'a') + write_to_file(ARGS.directory + "/activity_list.json", ACTIVITY_LIST.decode(), "a") # LIST = json.loads(ACTIVITY_LIST) # print(LIST) # Process each activity. for a in ACTIVITIES: # Display which entry we're working on. 
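Garmin caps each request at LIMIT_MAXIMUM activities, so the while loop above pages through the archive with start/limit query parameters, bumping TOTAL_DOWNLOADED after each chunk. A rough sketch of that pagination logic in isolation (the cap of 100 per request is illustrative; the script uses whatever LIMIT_MAXIMUM is set to):

def paginate(total_to_download, limit_maximum=100):
    """Yield (start, limit) pairs that cover the requested number of activities."""
    downloaded = 0
    while downloaded < total_to_download:
        remaining = total_to_download - downloaded
        limit = limit_maximum if remaining > limit_maximum else remaining
        yield downloaded, limit
        downloaded += limit

# Example: 250 activities requested, downloaded in chunks of at most 100.
print(list(paginate(250)))  # [(0, 100), (100, 100), (200, 50)]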
- print('Garmin Connect activity: [' + str(a['activity']['activityId']) + ']', end=' ') - print(a['activity']['activityName']) - print('\t' + a['activity']['uploadDate']['display'] + ',', end=' ') - if ARGS.format == 'gpx': - data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ - '_activity.gpx' - download_url = URL_GC_GPX_ACTIVITY + str(a['activity']['activityId']) + '?full=true' + print( + "Garmin Connect activity: [" + str(a["activity"]["activityId"]) + "]", + end=" ", + ) + print(a["activity"]["activityName"]) + print("\t" + a["activity"]["uploadDate"]["display"] + ",", end=" ") + if ARGS.format == "gpx": + data_filename = ( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity.gpx" + ) + download_url = ( + URL_GC_GPX_ACTIVITY + str(a["activity"]["activityId"]) + "?full=true" + ) print(download_url) - file_mode = 'w' - elif ARGS.format == 'tcx': - data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ - '_activity.tcx' - download_url = URL_GC_TCX_ACTIVITY + str(a['activity']['activityId']) + '?full=true' - file_mode = 'w' - elif ARGS.format == 'original': - data_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + \ - '_activity.zip' - fit_filename = ARGS.directory + '/' + str(a['activity']['activityId']) + '_activity.fit' - download_url = URL_GC_ORIGINAL_ACTIVITY + str(a['activity']['activityId']) - file_mode = 'wb' + file_mode = "w" + elif ARGS.format == "tcx": + data_filename = ( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity.tcx" + ) + download_url = ( + URL_GC_TCX_ACTIVITY + str(a["activity"]["activityId"]) + "?full=true" + ) + file_mode = "w" + elif ARGS.format == "original": + data_filename = ( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity.zip" + ) + fit_filename = ( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity.fit" + ) + download_url = URL_GC_ORIGINAL_ACTIVITY + str(a["activity"]["activityId"]) + file_mode = "wb" else: - raise Exception('Unrecognized format.') + raise Exception("Unrecognized format.") if isfile(data_filename): - print('\tData file already exists; skipping...') + print("\tData file already exists; skipping...") continue # Regardless of unzip setting, don't redownload if the ZIP or FIT file exists. - if ARGS.format == 'original' and isfile(fit_filename): - print('\tFIT data file already exists; skipping...') + if ARGS.format == "original" and isfile(fit_filename): + print("\tFIT data file already exists; skipping...") continue # Download the data file from Garmin Connect. If the download fails (e.g., due to timeout), # this script will die, but nothing will have been written to disk about this activity, so # just running it again should pick up where it left off. - print('\tDownloading file...', end=' ') + print("\tDownloading file...", end=" ") try: data = http_req(download_url) except urllib.error.HTTPError as errs: # Handle expected (though unfortunate) error codes; die on unexpected ones. - if errs.code == 500 and ARGS.format == 'tcx': + if errs.code == 500 and ARGS.format == "tcx": # Garmin will give an internal server error (HTTP 500) when downloading TCX files # if the original was a manual GPX upload. Writing an empty file prevents this file # from being redownloaded, similar to the way GPX files are saved even when there # are no tracks. One could be generated here, but that's a bit much. 
Use the GPX # format if you want actual data in every file, as I believe Garmin provides a GPX # file for every activity. - print('Writing empty file since Garmin did not generate a TCX file for this \ -activity...', end=' ') - data = '' - elif errs.code == 404 and ARGS.format == 'original': + print( + "Writing empty file since Garmin did not generate a TCX file for this \ +activity...", + end=" ", + ) + data = "" + elif errs.code == 404 and ARGS.format == "original": # For manual activities (i.e., entered in online without a file upload), there is # no original file. # Write an empty file to prevent redownloading it. - print('Writing empty file since there was no original activity data...', end=' ') - data = '' + print( + "Writing empty file since there was no original activity data...", + end=" ", + ) + data = "" else: - raise Exception('Failed. Got an unexpected HTTP error (' + str(errs.code) + \ - download_url +').') + raise Exception( + "Failed. Got an unexpected HTTP error (" + + str(errs.code) + + download_url + + ")." + ) # Persist file write_to_file(data_filename, data.decode(), file_mode) - print("Activity summary URL: " + URL_GC_ACTIVITY + str(a['activity']['activityId'])) - ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a['activity']['activityId'])) - write_to_file(ARGS.directory + '/' + str(a['activity']['activityId']) + \ - '_activity_summary.json', ACTIVITY_SUMMARY.decode(), 'a') + print( + "Activity summary URL: " + + URL_GC_ACTIVITY + + str(a["activity"]["activityId"]) + ) + ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activity"]["activityId"])) + write_to_file( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity_summary.json", + ACTIVITY_SUMMARY.decode(), + "a", + ) JSON_SUMMARY = json.loads(ACTIVITY_SUMMARY) # print(JSON_SUMMARY) - print("Activity details URL: " + URL_GC_ACTIVITY_DETAIL + str(a['activity']['activityId'])) - ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a['activity']['activityId'])) - write_to_file(ARGS.directory + '/' + str(a['activity']['activityId']) + \ - '_activity_detail.json', ACTIVITY_DETAIL.decode(), 'a') + print( + "Activity details URL: " + + URL_GC_ACTIVITY_DETAIL + + str(a["activity"]["activityId"]) + ) + ACTIVITY_DETAIL = http_req( + URL_GC_ACTIVITY_DETAIL + str(a["activity"]["activityId"]) + ) + write_to_file( + ARGS.directory + + "/" + + str(a["activity"]["activityId"]) + + "_activity_detail.json", + ACTIVITY_DETAIL.decode(), + "a", + ) JSON_DETAIL = json.loads(ACTIVITY_DETAIL) # print(JSON_DETAIL) # Write stats to CSV. 
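The download step above treats two HTTP errors as expected: a 500 when requesting a TCX export of a manually uploaded GPX activity, and a 404 when requesting the 'original' file of a manually entered activity; both cases write an empty placeholder file so the activity is not retried on the next run. A stripped-down sketch of that decision, using the same status codes and format names as the script:

def should_write_placeholder(code, fmt):
    """Return True when an empty placeholder file should be written, else raise."""
    if code == 500 and fmt == "tcx":
        return True  # Garmin cannot generate a TCX for a manual GPX upload
    if code == 404 and fmt == "original":
        return True  # manual activities have no original file to download
    raise Exception("Failed. Got an unexpected HTTP error (" + str(code) + ").")

print(should_write_placeholder(500, "tcx"))       # True
print(should_write_placeholder(404, "original"))  # True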
- empty_record = ',' - csv_record = '' - - csv_record += empty_record if 'activityName' not in a['activity'] else '"' + \ - a['activity']['activityName'].replace('"', '""') + '",' - csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + \ - a['activity']['activityDescription'].replace('"', '""') + '",' - csv_record += empty_record if 'startTimeLocal' not in JSON_SUMMARY['summaryDTO'] \ - else '"' + JSON_SUMMARY['summaryDTO']['startTimeLocal'] + '",' - csv_record += empty_record if 'elapsedDuration' not in JSON_SUMMARY['summaryDTO'] \ - else hhmmss_from_seconds(JSON_SUMMARY['summaryDTO']['elapsedDuration']) + ',' - csv_record += empty_record if 'movingDuration' not in JSON_SUMMARY['summaryDTO'] \ - else hhmmss_from_seconds(JSON_SUMMARY['summaryDTO']['movingDuration']) + ',' - csv_record += empty_record if 'distance' not in JSON_SUMMARY['summaryDTO'] \ - else "{0:.5f}".format(JSON_SUMMARY['summaryDTO']['distance']/1000) + ',' - csv_record += empty_record if 'averageSpeed' not in JSON_SUMMARY['summaryDTO'] \ - else kmh_from_mps(JSON_SUMMARY['summaryDTO']['averageSpeed']) + ',' - csv_record += empty_record if 'averageMovingSpeed' not in JSON_SUMMARY['summaryDTO'] \ - else kmh_from_mps(JSON_SUMMARY['summaryDTO']['averageMovingSpeed']) + ',' - csv_record += empty_record if 'maxSpeed' not in JSON_SUMMARY['summaryDTO'] \ - else kmh_from_mps(JSON_SUMMARY['summaryDTO']['maxSpeed']) + ',' - csv_record += empty_record if 'elevationLoss' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['elevationLoss']) + ',' - csv_record += empty_record if 'elevationGain' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['elevationGain']) + ',' - csv_record += empty_record if 'minElevation' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['minElevation']) + ',' - csv_record += empty_record if 'maxElevation' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['maxElevation']) + ',' - csv_record += empty_record if 'minHR' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'maxHR' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['maxHR']) + ',' - csv_record += empty_record if 'averageHR' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['averageHR']) + ',' - csv_record += empty_record if 'calories' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['calories']) + ',' - csv_record += empty_record if 'averageBikeCadence' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['averageBikeCadence']) + ',' - csv_record += empty_record if 'maxBikeCadence' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['maxBikeCadence']) + ',' - csv_record += empty_record if 'totalNumberOfStrokes' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['totalNumberOfStrokes']) + ',' - csv_record += empty_record if 'averageTemperature' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['averageTemperature']) + ',' - csv_record += empty_record if 'minTemperature' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['minTemperature']) + ',' - csv_record += empty_record if 'maxTemperature' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['maxTemperature']) + ',' - csv_record += empty_record if 'activityId' not in a['activity'] else \ - 
'"https://connect.garmin.com/modern/activity/' + str(a['activity']['activityId']) + '",' - csv_record += empty_record if 'endTimestamp' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'beginTimestamp' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'endTimestamp' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'device' not in a['activity'] else \ - a['activity']['device']['display'] + ' ' + a['activity']['device']['version'] + ',' - csv_record += empty_record if 'activityType' not in a['activity'] else \ - a['activity']['activityType']['display'] + ',' - csv_record += empty_record if 'eventType' not in a['activity'] else \ - a['activity']['eventType']['display'] + ',' - csv_record += empty_record if 'activityTimeZone' not in a['activity'] else \ - a['activity']['activityTimeZone']['display'] + ',' - csv_record += empty_record if 'startLatitude' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['startLatitude']) + ',' - csv_record += empty_record if 'startLongitude' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['startLongitude']) + ',' - csv_record += empty_record if 'endLatitude' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['endLatitude']) + ',' - csv_record += empty_record if 'endLongitude' not in JSON_SUMMARY['summaryDTO'] \ - else str(JSON_SUMMARY['summaryDTO']['endLongitude']) + ',' - csv_record += empty_record if 'gainCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'lossCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'maxCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'minCorrectedElevation' not in JSON_SUMMARY['summaryDTO'] \ - else ',' # no longer available in JSON - csv_record += empty_record if 'metricsCount' not in \ - JSON_DETAIL['com.garmin.activity.details.json.ActivityDetails'] else \ - str(JSON_DETAIL['com.garmin.activity.details.json.ActivityDetails']['metricsCount']) \ - + ',' - csv_record += '\n' + empty_record = "," + csv_record = "" + + csv_record += ( + empty_record + if "activityName" not in a["activity"] + else '"' + a["activity"]["activityName"].replace('"', '""') + '",' + ) + csv_record += ( + empty_record + if "activityDescription" not in a["activity"] + else '"' + a["activity"]["activityDescription"].replace('"', '""') + '",' + ) + csv_record += ( + empty_record + if "startTimeLocal" not in JSON_SUMMARY["summaryDTO"] + else '"' + JSON_SUMMARY["summaryDTO"]["startTimeLocal"] + '",' + ) + csv_record += ( + empty_record + if "elapsedDuration" not in JSON_SUMMARY["summaryDTO"] + else hhmmss_from_seconds(JSON_SUMMARY["summaryDTO"]["elapsedDuration"]) + + "," + ) + csv_record += ( + empty_record + if "movingDuration" not in JSON_SUMMARY["summaryDTO"] + else hhmmss_from_seconds(JSON_SUMMARY["summaryDTO"]["movingDuration"]) + "," + ) + csv_record += ( + empty_record + if "distance" not in JSON_SUMMARY["summaryDTO"] + else "{0:.5f}".format(JSON_SUMMARY["summaryDTO"]["distance"] / 1000) + "," + ) + csv_record += ( + empty_record + if "averageSpeed" not in JSON_SUMMARY["summaryDTO"] + else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["averageSpeed"]) + "," + ) + csv_record += ( + 
empty_record + if "averageMovingSpeed" not in JSON_SUMMARY["summaryDTO"] + else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["averageMovingSpeed"]) + "," + ) + csv_record += ( + empty_record + if "maxSpeed" not in JSON_SUMMARY["summaryDTO"] + else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["maxSpeed"]) + "," + ) + csv_record += ( + empty_record + if "elevationLoss" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["elevationLoss"]) + "," + ) + csv_record += ( + empty_record + if "elevationGain" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["elevationGain"]) + "," + ) + csv_record += ( + empty_record + if "minElevation" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["minElevation"]) + "," + ) + csv_record += ( + empty_record + if "maxElevation" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["maxElevation"]) + "," + ) + csv_record += ( + empty_record if "minHR" not in JSON_SUMMARY["summaryDTO"] else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "maxHR" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["maxHR"]) + "," + ) + csv_record += ( + empty_record + if "averageHR" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["averageHR"]) + "," + ) + csv_record += ( + empty_record + if "calories" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["calories"]) + "," + ) + csv_record += ( + empty_record + if "averageBikeCadence" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["averageBikeCadence"]) + "," + ) + csv_record += ( + empty_record + if "maxBikeCadence" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["maxBikeCadence"]) + "," + ) + csv_record += ( + empty_record + if "totalNumberOfStrokes" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["totalNumberOfStrokes"]) + "," + ) + csv_record += ( + empty_record + if "averageTemperature" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["averageTemperature"]) + "," + ) + csv_record += ( + empty_record + if "minTemperature" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["minTemperature"]) + "," + ) + csv_record += ( + empty_record + if "maxTemperature" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["maxTemperature"]) + "," + ) + csv_record += ( + empty_record + if "activityId" not in a["activity"] + else '"https://connect.garmin.com/modern/activity/' + + str(a["activity"]["activityId"]) + + '",' + ) + csv_record += ( + empty_record if "endTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," + ) # no longer available in JSON + csv_record += ( + empty_record if "beginTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," + ) # no longer available in JSON + csv_record += ( + empty_record if "endTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "device" not in a["activity"] + else a["activity"]["device"]["display"] + + " " + + a["activity"]["device"]["version"] + + "," + ) + csv_record += ( + empty_record + if "activityType" not in a["activity"] + else a["activity"]["activityType"]["display"] + "," + ) + csv_record += ( + empty_record + if "eventType" not in a["activity"] + else a["activity"]["eventType"]["display"] + "," + ) + csv_record += ( + empty_record + if "activityTimeZone" not in a["activity"] + else a["activity"]["activityTimeZone"]["display"] + 
"," + ) + csv_record += ( + empty_record + if "startLatitude" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["startLatitude"]) + "," + ) + csv_record += ( + empty_record + if "startLongitude" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["startLongitude"]) + "," + ) + csv_record += ( + empty_record + if "endLatitude" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["endLatitude"]) + "," + ) + csv_record += ( + empty_record + if "endLongitude" not in JSON_SUMMARY["summaryDTO"] + else str(JSON_SUMMARY["summaryDTO"]["endLongitude"]) + "," + ) + csv_record += ( + empty_record + if "gainCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] + else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "lossCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] + else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "maxCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] + else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "minCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] + else "," + ) # no longer available in JSON + csv_record += ( + empty_record + if "metricsCount" + not in JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"] + else str( + JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"][ + "metricsCount" + ] + ) + + "," + ) + csv_record += "\n" CSV_FILE.write(csv_record) - if ARGS.format == 'gpx' and data: + if ARGS.format == "gpx" and data: # Validate GPX data. If we have an activity without GPS data (e.g., running on a # treadmill), Garmin Connect still kicks out a GPX (sometimes), but there is only # activity information, no GPS data. N.B. You can omit the XML parse (and the # associated log messages) to speed things up. gpx = parseString(data) - if gpx.getElementsByTagName('trkpt'): - print('Done. GPX data saved.') + if gpx.getElementsByTagName("trkpt"): + print("Done. GPX data saved.") else: - print('Done. No track points found.') - elif ARGS.format == 'original': + print("Done. No track points found.") + elif ARGS.format == "original": # Even manual upload of a GPX file is zipped, but we'll validate the extension. - if ARGS.unzip and data_filename[-3:].lower() == 'zip': - print("Unzipping and removing original files...", end=' ') - print('Filesize is: ' + str(stat(data_filename).st_size)) + if ARGS.unzip and data_filename[-3:].lower() == "zip": + print("Unzipping and removing original files...", end=" ") + print("Filesize is: " + str(stat(data_filename).st_size)) if stat(data_filename).st_size > 0: - zip_file = open(data_filename, 'rb') + zip_file = open(data_filename, "rb") z = zipfile.ZipFile(zip_file) for name in z.namelist(): z.extract(name, ARGS.directory) zip_file.close() else: - print('Skipping 0Kb zip file.') + print("Skipping 0Kb zip file.") remove(data_filename) - print('Done.') + print("Done.") else: # TODO: Consider validating other formats. - print('Done.') + print("Done.") TOTAL_DOWNLOADED += NUM_TO_DOWNLOAD # End while loop for multiple chunks. CSV_FILE.close() -print('Open CSV output.') +print("Open CSV output.") print(CSV_FILENAME) # open CSV file. 
Comment this line out if you don't want this behavior -call(["/usr/bin/libreoffice6.0", "--calc", CSV_FILENAME]) +call(["/usr/bin/libreoffice6.1", "--calc", CSV_FILENAME]) -print('Done!') +print("Done!") From a0ac786df0bd2485ca8c870fdb45e488c3f698ad Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sat, 25 Aug 2018 18:11:31 -0700 Subject: [PATCH 19/33] Largish refactor based on deprecation of search URL endpoint (was returning 400). Worked out how to download the device type and version for the ride. Signed-off-by: Michael Payne --- gcexport3.py | 130 +++++++++++++++++++++++++-------------------------- 1 file changed, 63 insertions(+), 67 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 213b7ea..076315b 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -6,7 +6,7 @@ Original author: Kyle Krafka (https://github.com/kjkjava/) Date: April 28, 2015 Fork author: Michael P (https://github.com/moderation/) -Date: February 15, 2018 +Date: August 25, 2018 Description: Use this script to export your fitness data from Garmin Connect. See README.md for more information. @@ -199,9 +199,6 @@ def http_req(url, post=None, headers=None): # URLs for various services. URL_GC_LOGIN = "https://sso.garmin.com/sso/login?" + urllib.parse.urlencode(DATA) URL_GC_POST_AUTH = "https://connect.garmin.com/modern/activities?" -URL_GC_SEARCH = ( - "https://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?" -) URL_GC_LIST = "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?" URL_GC_ACTIVITY = "https://connect.garmin.com/modern/proxy/activity-service/activity/" URL_GC_ACTIVITY_DETAIL = ( @@ -216,7 +213,9 @@ def http_req(url, post=None, headers=None): URL_GC_ORIGINAL_ACTIVITY = ( "http://connect.garmin.com/proxy/download-service/files/activity/" ) - +URL_DEVICE_DETAIL = ( + "https://connect.garmin.com/modern/proxy/device-service/deviceservice/app-info/" +) # Initially, we need to get a valid session cookie, so we pull the login page. print("Request login page") http_req(URL_GC_LOGIN) @@ -328,52 +327,32 @@ def http_req(url, post=None, headers=None): NUM_TO_DOWNLOAD = TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED SEARCH_PARAMS = {"start": TOTAL_DOWNLOADED, "limit": NUM_TO_DOWNLOAD} + # Query Garmin Connect - print("Making activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print(URL_GC_SEARCH + urllib.parse.urlencode(SEARCH_PARAMS)) - RESULT = http_req(URL_GC_SEARCH + urllib.parse.urlencode(SEARCH_PARAMS)) - print("Finished activity request ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - - # Persist JSON - write_to_file(ARGS.directory + "/activities.json", RESULT.decode(), "a") - - JSON_RESULTS = json.loads(RESULT) # TODO: Catch possible exceptions here. - - if DOWNLOAD_ALL: - # Modify TOTAL_TO_DOWNLOAD based on how many activities the server reports. - TOTAL_TO_DOWNLOAD = int(JSON_RESULTS["results"]["totalFound"]) - - # Do it only once. - DOWNLOAD_ALL = False - - # Pull out just the list of activities. - ACTIVITIES = JSON_RESULTS["results"]["activities"] - # print(ACTIVITIES) - print("Activity list URL: " + URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) ACTIVITY_LIST = http_req(URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) write_to_file(ARGS.directory + "/activity_list.json", ACTIVITY_LIST.decode(), "a") - # LIST = json.loads(ACTIVITY_LIST) + LIST = json.loads(ACTIVITY_LIST) # print(LIST) # Process each activity. - for a in ACTIVITIES: + for a in LIST: # Display which entry we're working on. 
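PATCH 19 drops the retired activity-search-service feed (which had started returning HTTP 400) in favour of the activitylist-service endpoint. The new endpoint returns a flat JSON array, so each element is the activity itself rather than being wrapped in a results/activities object with a nested "activity" key. A small comparison of how the two payload shapes are walked; both payloads are hand-written stand-ins, not captured responses:

import json

# Old search-service shape (wrapper object), invented values.
old_payload = json.loads('{"results": {"activities": [{"activity": {"activityId": 1, "activityName": "Morning Ride"}}]}}')
for item in old_payload["results"]["activities"]:
    print(item["activity"]["activityId"], item["activity"]["activityName"])

# New activitylist-service shape (flat array), invented values.
new_payload = json.loads('[{"activityId": 1, "activityName": "Morning Ride"}]')
for item in new_payload:
    print(item["activityId"], item["activityName"])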
print( - "Garmin Connect activity: [" + str(a["activity"]["activityId"]) + "]", + "Garmin Connect activity: [" + str(a["activityId"]) + "]", end=" ", ) - print(a["activity"]["activityName"]) - print("\t" + a["activity"]["uploadDate"]["display"] + ",", end=" ") + print(a["activityName"]) + # print("\t" + a["uploadDate"]["display"] + ",", end=" ") if ARGS.format == "gpx": data_filename = ( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity.gpx" ) download_url = ( - URL_GC_GPX_ACTIVITY + str(a["activity"]["activityId"]) + "?full=true" + URL_GC_GPX_ACTIVITY + str(a["activityId"]) + "?full=true" ) print(download_url) file_mode = "w" @@ -381,27 +360,27 @@ def http_req(url, post=None, headers=None): data_filename = ( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity.tcx" ) download_url = ( - URL_GC_TCX_ACTIVITY + str(a["activity"]["activityId"]) + "?full=true" + URL_GC_TCX_ACTIVITY + str(a["activityId"]) + "?full=true" ) file_mode = "w" elif ARGS.format == "original": data_filename = ( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity.zip" ) fit_filename = ( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity.fit" ) - download_url = URL_GC_ORIGINAL_ACTIVITY + str(a["activity"]["activityId"]) + download_url = URL_GC_ORIGINAL_ACTIVITY + str(a["activityId"]) file_mode = "wb" else: raise Exception("Unrecognized format.") @@ -458,13 +437,13 @@ def http_req(url, post=None, headers=None): print( "Activity summary URL: " + URL_GC_ACTIVITY - + str(a["activity"]["activityId"]) + + str(a["activityId"]) ) - ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activity"]["activityId"])) + ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activityId"])) write_to_file( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity_summary.json", ACTIVITY_SUMMARY.decode(), "a", @@ -472,18 +451,35 @@ def http_req(url, post=None, headers=None): JSON_SUMMARY = json.loads(ACTIVITY_SUMMARY) # print(JSON_SUMMARY) + print( + "Device detail URL: " + + URL_DEVICE_DETAIL + + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]) + ) + DEVICE_DETAIL = http_req(URL_DEVICE_DETAIL + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"])) + write_to_file( + ARGS.directory + + "/" + + str(a["activityId"]) + + "_app_info.json", + DEVICE_DETAIL.decode(), + "a", + ) + JSON_DEVICE = json.loads(DEVICE_DETAIL) + # print(JSON_DEVICE) + print( "Activity details URL: " + URL_GC_ACTIVITY_DETAIL - + str(a["activity"]["activityId"]) + + str(a["activityId"]) ) ACTIVITY_DETAIL = http_req( - URL_GC_ACTIVITY_DETAIL + str(a["activity"]["activityId"]) + URL_GC_ACTIVITY_DETAIL + str(a["activityId"]) ) write_to_file( ARGS.directory + "/" - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + "_activity_detail.json", ACTIVITY_DETAIL.decode(), "a", @@ -497,13 +493,13 @@ def http_req(url, post=None, headers=None): csv_record += ( empty_record - if "activityName" not in a["activity"] - else '"' + a["activity"]["activityName"].replace('"', '""') + '",' + if "activityName" not in a + else '"' + a["activityName"].replace('"', '""') + '",' ) csv_record += ( empty_record - if "activityDescription" not in a["activity"] - else '"' + a["activity"]["activityDescription"].replace('"', '""') + '",' + if "activityDescription" not in a + else '"' + a["activityDescription"].replace('"', '""') + '",' ) csv_record += ( 
empty_record @@ -563,7 +559,7 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record if "minHR" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "maxHR" not in JSON_SUMMARY["summaryDTO"] @@ -611,42 +607,42 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record - if "activityId" not in a["activity"] + if "activityId" not in a else '"https://connect.garmin.com/modern/activity/' - + str(a["activity"]["activityId"]) + + str(a["activityId"]) + '",' ) csv_record += ( empty_record if "endTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "beginTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "endTimestamp" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record - if "device" not in a["activity"] - else a["activity"]["device"]["display"] + if "productDisplayName" not in JSON_DEVICE + else JSON_DEVICE["productDisplayName"] + " " - + a["activity"]["device"]["version"] + + JSON_DEVICE["versionString"] + "," ) csv_record += ( empty_record - if "activityType" not in a["activity"] - else a["activity"]["activityType"]["display"] + "," + if "activityType" not in a + else a["activityType"]["typeKey"].title() + "," ) csv_record += ( empty_record - if "eventType" not in a["activity"] - else a["activity"]["eventType"]["display"] + "," + if "eventType" not in a + else a["eventType"]["typeKey"].title() + "," ) csv_record += ( empty_record - if "activityTimeZone" not in a["activity"] - else a["activity"]["activityTimeZone"]["display"] + "," + if "timeZoneUnitDTO" not in JSON_SUMMARY + else JSON_SUMMARY["timeZoneUnitDTO"]["timeZone"] + "," ) csv_record += ( empty_record @@ -672,22 +668,22 @@ def http_req(url, post=None, headers=None): empty_record if "gainCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "lossCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "maxCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "minCorrectedElevation" not in JSON_SUMMARY["summaryDTO"] else "," - ) # no longer available in JSON + ) csv_record += ( empty_record if "metricsCount" From d4a625bd365ed8c4b2c0e3143084fcd860ebd4fa Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sun, 26 Aug 2018 15:30:19 -0700 Subject: [PATCH 20/33] Fix description. Signed-off-by: Michael Payne --- gcexport3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 076315b..70c962b 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -498,8 +498,8 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record - if "activityDescription" not in a - else '"' + a["activityDescription"].replace('"', '""') + '",' + if "description" not in a + else '"' + a["description"].replace('"', '""') + '",' ) csv_record += ( empty_record From 0dc50428d53040be24d522edc9851a73dd391420 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Mon, 27 Aug 2018 11:18:11 -0700 Subject: [PATCH 21/33] Handle description being null. Use @pe-st's method for extracting total number of rides. Linting via `black`. 
Signed-off-by: Michael Payne --- gcexport3.py | 130 ++++++++++++++++++++++++--------------------------- 1 file changed, 61 insertions(+), 69 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 70c962b..6d06db2 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -199,6 +199,10 @@ def http_req(url, post=None, headers=None): # URLs for various services. URL_GC_LOGIN = "https://sso.garmin.com/sso/login?" + urllib.parse.urlencode(DATA) URL_GC_POST_AUTH = "https://connect.garmin.com/modern/activities?" +URL_GC_PROFILE = "https://connect.garmin.com/modern/profile" +URL_GC_USERSTATS = ( + "https://connect.garmin.com/modern/proxy/userstats-service/statistics/" +) URL_GC_LIST = "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?" URL_GC_ACTIVITY = "https://connect.garmin.com/modern/proxy/activity-service/activity/" URL_GC_ACTIVITY_DETAIL = ( @@ -245,7 +249,7 @@ def http_req(url, post=None, headers=None): you enter the correct username and password?" ) LOGIN_TICKET = MATCH.group(1) -print("login ticket=" + LOGIN_TICKET) +print("Login ticket=" + LOGIN_TICKET) print("Request authentication URL: " + URL_GC_POST_AUTH + "ticket=" + LOGIN_TICKET) http_req(URL_GC_POST_AUTH + "ticket=" + LOGIN_TICKET) @@ -307,11 +311,34 @@ def http_req(url, post=None, headers=None): DOWNLOAD_ALL = False if ARGS.count == "all": - # If the user wants to download all activities, first download one, - # then the result of that request will tell us how many are available - # so we will modify the variables then. - TOTAL_TO_DOWNLOAD = 1 - DOWNLOAD_ALL = True + # If the user wants to download all activities, query the userstats + # on the profile page to know how many are available + print("Getting display name and user stats ~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print(URL_GC_PROFILE) + PROFILE_PAGE = http_req(URL_GC_PROFILE) + # write_to_file(args.directory + '/profile.html', profile_page, 'a') + + # extract the display name from the profile page, it should be in there as + # \"displayName\":\"eschep\" + PATTERN = re.compile( + r".*\\\"displayName\\\":\\\"([-\w]+)\\\".*", re.MULTILINE | re.DOTALL + ) + MATCH = PATTERN.match(PROFILE_PAGE) + if not MATCH: + raise Exception("Did not find the display name in the profile page.") + DISPLAY_NAME = MATCH.group(1) + print("displayName=" + DISPLAY_NAME) + + print(URL_GC_USERSTATS + display_name) + USER_STATS = http_req(URL_GC_USERSTATS + DISPLAY_NAME) + print("Finished display name and user stats ~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + # Persist JSON + write_to_file(ARGS.directory + "/userstats.json", USER_STATS, "w") + + # Modify total_to_download based on how many activities the server reports. + JSON_USER = json.loads(USER_STATS) + TOTAL_TO_DOWNLOAD = int(JSON_USER["userMetrics"][0]["totalActivities"]) else: TOTAL_TO_DOWNLOAD = int(ARGS.count) TOTAL_DOWNLOADED = 0 @@ -327,7 +354,7 @@ def http_req(url, post=None, headers=None): NUM_TO_DOWNLOAD = TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED SEARCH_PARAMS = {"start": TOTAL_DOWNLOADED, "limit": NUM_TO_DOWNLOAD} - + # Query Garmin Connect print("Activity list URL: " + URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) ACTIVITY_LIST = http_req(URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS)) @@ -338,48 +365,27 @@ def http_req(url, post=None, headers=None): # Process each activity. for a in LIST: # Display which entry we're working on. 
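For --count all, PATCH 21 scrapes the display name out of the profile page, where it appears as an escaped \"displayName\":\"...\" fragment inside a script block, then queries the userstats service and reads userMetrics[0]["totalActivities"]. (Note the print call above still references a lowercase display_name; the next patch corrects it to DISPLAY_NAME.) A minimal sketch of the scraping step against an invented page fragment:

import re

# Invented profile-page fragment; the real page embeds escaped JSON like this.
profile_page = 'window.VIEWER = JSON.parse("{\\"displayName\\":\\"example_user\\"}");'

pattern = re.compile(r'.*\\"displayName\\":\\"([-\w]+)\\".*', re.MULTILINE | re.DOTALL)
match = pattern.match(profile_page)
if match:
    print("displayName=" + match.group(1))

# The display name is appended to the userstats URL, and
# userMetrics[0]["totalActivities"] gives the total number of activities to fetch.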
- print( - "Garmin Connect activity: [" + str(a["activityId"]) + "]", - end=" ", - ) + print("Garmin Connect activity: [" + str(a["activityId"]) + "]", end=" ") print(a["activityName"]) # print("\t" + a["uploadDate"]["display"] + ",", end=" ") if ARGS.format == "gpx": data_filename = ( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity.gpx" - ) - download_url = ( - URL_GC_GPX_ACTIVITY + str(a["activityId"]) + "?full=true" + ARGS.directory + "/" + str(a["activityId"]) + "_activity.gpx" ) + download_url = URL_GC_GPX_ACTIVITY + str(a["activityId"]) + "?full=true" print(download_url) file_mode = "w" elif ARGS.format == "tcx": data_filename = ( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity.tcx" - ) - download_url = ( - URL_GC_TCX_ACTIVITY + str(a["activityId"]) + "?full=true" + ARGS.directory + "/" + str(a["activityId"]) + "_activity.tcx" ) + download_url = URL_GC_TCX_ACTIVITY + str(a["activityId"]) + "?full=true" file_mode = "w" elif ARGS.format == "original": data_filename = ( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity.zip" - ) - fit_filename = ( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity.fit" + ARGS.directory + "/" + str(a["activityId"]) + "_activity.zip" ) + fit_filename = ARGS.directory + "/" + str(a["activityId"]) + "_activity.fit" download_url = URL_GC_ORIGINAL_ACTIVITY + str(a["activityId"]) file_mode = "wb" else: @@ -434,17 +440,10 @@ def http_req(url, post=None, headers=None): # Persist file write_to_file(data_filename, data.decode(), file_mode) - print( - "Activity summary URL: " - + URL_GC_ACTIVITY - + str(a["activityId"]) - ) + print("Activity summary URL: " + URL_GC_ACTIVITY + str(a["activityId"])) ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activityId"])) write_to_file( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity_summary.json", + ARGS.directory + "/" + str(a["activityId"]) + "_activity_summary.json", ACTIVITY_SUMMARY.decode(), "a", ) @@ -456,31 +455,22 @@ def http_req(url, post=None, headers=None): + URL_DEVICE_DETAIL + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]) ) - DEVICE_DETAIL = http_req(URL_DEVICE_DETAIL + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"])) + DEVICE_DETAIL = http_req( + URL_DEVICE_DETAIL + + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]) + ) write_to_file( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_app_info.json", + ARGS.directory + "/" + str(a["activityId"]) + "_app_info.json", DEVICE_DETAIL.decode(), "a", ) JSON_DEVICE = json.loads(DEVICE_DETAIL) # print(JSON_DEVICE) - print( - "Activity details URL: " - + URL_GC_ACTIVITY_DETAIL - + str(a["activityId"]) - ) - ACTIVITY_DETAIL = http_req( - URL_GC_ACTIVITY_DETAIL + str(a["activityId"]) - ) + print("Activity details URL: " + URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) + ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) write_to_file( - ARGS.directory - + "/" - + str(a["activityId"]) - + "_activity_detail.json", + ARGS.directory + "/" + str(a["activityId"]) + "_activity_detail.json", ACTIVITY_DETAIL.decode(), "a", ) @@ -496,11 +486,15 @@ def http_req(url, post=None, headers=None): if "activityName" not in a else '"' + a["activityName"].replace('"', '""') + '",' ) - csv_record += ( - empty_record - if "description" not in a - else '"' + a["description"].replace('"', '""') + '",' - ) + + # maybe a more elegant way of coding this but need to handle description as null + if "description" not in a: + 
csv_record += empty_record + elif a["description"] is not None: + csv_record += '"' + a["description"].replace('"', '""') + '",' + else: + csv_record += empty_record + csv_record += ( empty_record if "startTimeLocal" not in JSON_SUMMARY["summaryDTO"] @@ -557,9 +551,7 @@ def http_req(url, post=None, headers=None): if "maxElevation" not in JSON_SUMMARY["summaryDTO"] else str(JSON_SUMMARY["summaryDTO"]["maxElevation"]) + "," ) - csv_record += ( - empty_record if "minHR" not in JSON_SUMMARY["summaryDTO"] else "," - ) + csv_record += empty_record if "minHR" not in JSON_SUMMARY["summaryDTO"] else "," csv_record += ( empty_record if "maxHR" not in JSON_SUMMARY["summaryDTO"] From 925f8348de43c0be28bdbc2fbd85243e31adaf08 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Mon, 27 Aug 2018 12:25:41 -0700 Subject: [PATCH 22/33] Test and make work downloading all activities. Signed-off-by: Michael Payne --- gcexport3.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 6d06db2..cb2241d 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -313,9 +313,8 @@ def http_req(url, post=None, headers=None): if ARGS.count == "all": # If the user wants to download all activities, query the userstats # on the profile page to know how many are available - print("Getting display name and user stats ~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print(URL_GC_PROFILE) - PROFILE_PAGE = http_req(URL_GC_PROFILE) + print("Getting display name and user stats via: " + URL_GC_PROFILE) + PROFILE_PAGE = http_req(URL_GC_PROFILE).decode() # write_to_file(args.directory + '/profile.html', profile_page, 'a') # extract the display name from the profile page, it should be in there as @@ -329,19 +328,21 @@ def http_req(url, post=None, headers=None): DISPLAY_NAME = MATCH.group(1) print("displayName=" + DISPLAY_NAME) - print(URL_GC_USERSTATS + display_name) + print(URL_GC_USERSTATS + DISPLAY_NAME) USER_STATS = http_req(URL_GC_USERSTATS + DISPLAY_NAME) print("Finished display name and user stats ~~~~~~~~~~~~~~~~~~~~~~~~~~~") # Persist JSON - write_to_file(ARGS.directory + "/userstats.json", USER_STATS, "w") + write_to_file(ARGS.directory + "/userstats.json", USER_STATS.decode(), "a") # Modify total_to_download based on how many activities the server reports. JSON_USER = json.loads(USER_STATS) TOTAL_TO_DOWNLOAD = int(JSON_USER["userMetrics"][0]["totalActivities"]) else: TOTAL_TO_DOWNLOAD = int(ARGS.count) + TOTAL_DOWNLOADED = 0 +print("Total to download: " + str(TOTAL_TO_DOWNLOAD)) # This while loop will download data from the server in multiple chunks, if necessary. while TOTAL_DOWNLOADED < TOTAL_TO_DOWNLOAD: From ff23760d8abb632251c45ff2d1981692e0d84f68 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Mon, 27 Aug 2018 22:12:13 -0700 Subject: [PATCH 23/33] Update gitignore. 
Signed-off-by: Michael Payne --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 5dc43a8..7452e45 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,3 @@ - .DS_Store extras/ +*_garmin_connect_export/ From 8a04e533d8e2b6b7dce75de1acb356c2b4890836 Mon Sep 17 00:00:00 2001 From: Christian Moelders Date: Wed, 29 Aug 2018 22:46:37 +0200 Subject: [PATCH 24/33] Adds support for display names containing dots --- gcexport3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcexport3.py b/gcexport3.py index cb2241d..8423d20 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -320,7 +320,7 @@ def http_req(url, post=None, headers=None): # extract the display name from the profile page, it should be in there as # \"displayName\":\"eschep\" PATTERN = re.compile( - r".*\\\"displayName\\\":\\\"([-\w]+)\\\".*", re.MULTILINE | re.DOTALL + r".*\\\"displayName\\\":\\\"([-.\w]+)\\\".*", re.MULTILINE | re.DOTALL ) MATCH = PATTERN.match(PROFILE_PAGE) if not MATCH: From b425eb187bdeac7bf93aa1635b7e392f90db35fe Mon Sep 17 00:00:00 2001 From: Christian Moelders Date: Thu, 30 Aug 2018 00:29:45 +0200 Subject: [PATCH 25/33] Decide whether a response from Garmin Connect has to be decoded or not. Closes #14 --- gcexport3.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/gcexport3.py b/gcexport3.py index cb2241d..b62d9ad 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -117,6 +117,17 @@ def write_to_file(filename, content, mode): write_file.write(content) write_file.close() +def decoding_decider(data): + """Helper function that decides if a decoding should happen or not.""" + if ARGS.format == "original": + # An original file (ZIP file) is binary and not UTF-8 encoded + data = data + else: + # GPX and TCX are textfiles and UTF-8 encoded + data = data.decode() + + return data + # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. def http_req(url, post=None, headers=None): @@ -439,7 +450,7 @@ def http_req(url, post=None, headers=None): ) # Persist file - write_to_file(data_filename, data.decode(), file_mode) + write_to_file(data_filename, decoding_decider(data), file_mode) print("Activity summary URL: " + URL_GC_ACTIVITY + str(a["activityId"])) ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activityId"])) From 5a75cec7306470a6016c0ca9848581edc38e8c14 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Fri, 31 Aug 2018 13:02:59 -0700 Subject: [PATCH 26/33] Linting by black. 
Signed-off-by: Michael Payne --- gcexport3.py | 1 + 1 file changed, 1 insertion(+) diff --git a/gcexport3.py b/gcexport3.py index cac38d3..b785cab 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -117,6 +117,7 @@ def write_to_file(filename, content, mode): write_file.write(content) write_file.close() + def decoding_decider(data): """Helper function that decides if a decoding should happen or not.""" if ARGS.format == "original": From 18cd3b115ade1b3968fb57bfed2f546e78fb0a6c Mon Sep 17 00:00:00 2001 From: Peter Steiner Date: Wed, 5 Sep 2018 18:04:10 +0200 Subject: [PATCH 27/33] Do nothing in decoding_decider for empty GPX data https://github.com/moderation/garmin-connect-export/issues/21 --- .gitignore | 1 + gcexport3.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7452e45..d07d90b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .DS_Store extras/ *_garmin_connect_export/ +*.pyc diff --git a/gcexport3.py b/gcexport3.py index b785cab..960d463 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -123,7 +123,7 @@ def decoding_decider(data): if ARGS.format == "original": # An original file (ZIP file) is binary and not UTF-8 encoded data = data - else: + elif data: # GPX and TCX are textfiles and UTF-8 encoded data = data.decode() From 63f62dc1239becd89471b166bd219f446c249895 Mon Sep 17 00:00:00 2001 From: Peter Steiner Date: Wed, 5 Sep 2018 22:42:25 +0200 Subject: [PATCH 28/33] Add more error handling - when DEVICE_DETAIL is empty/None - when ACTIVITY_DETAIL gets HTTP error 500 - when a['activityName'] is None --- gcexport3.py | 44 ++++++++++++++++++++++++++------------------ 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 960d463..792c76d 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -472,23 +472,31 @@ def http_req(url, post=None, headers=None): URL_DEVICE_DETAIL + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]) ) - write_to_file( - ARGS.directory + "/" + str(a["activityId"]) + "_app_info.json", - DEVICE_DETAIL.decode(), - "a", - ) - JSON_DEVICE = json.loads(DEVICE_DETAIL) - # print(JSON_DEVICE) + if DEVICE_DETAIL: + write_to_file( + ARGS.directory + "/" + str(a["activityId"]) + "_app_info.json", + DEVICE_DETAIL.decode(), + "a", + ) + JSON_DEVICE = json.loads(DEVICE_DETAIL) + # print(JSON_DEVICE) + else: + print("Retrieving Device Details failed.") + JSON_DEVICE = None print("Activity details URL: " + URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) - ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) - write_to_file( - ARGS.directory + "/" + str(a["activityId"]) + "_activity_detail.json", - ACTIVITY_DETAIL.decode(), - "a", - ) - JSON_DETAIL = json.loads(ACTIVITY_DETAIL) - # print(JSON_DETAIL) + try: + ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) + write_to_file( + ARGS.directory + "/" + str(a["activityId"]) + "_activity_detail.json", + ACTIVITY_DETAIL.decode(), + "a", + ) + JSON_DETAIL = json.loads(ACTIVITY_DETAIL) + # print(JSON_DETAIL) + except: + print("Retrieving Activity Details failed.") + JSON_DETAIL = None # Write stats to CSV. 
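The error handling added in PATCH 28 follows one pattern throughout: try the secondary request, fall back to None when it fails or comes back empty, and make every later CSV lookup tolerate that None. A compact sketch of the guard pattern with a stand-in fetcher (fetch_json and the URL are hypothetical, not part of the script, and the exception types are narrower than the bare except used above):

import json
from urllib.error import HTTPError

def fetch_json(url, http_get):
    """Return parsed JSON from http_get(url), or None if the request or parse fails."""
    try:
        raw = http_get(url)
        return json.loads(raw) if raw else None
    except (HTTPError, ValueError):
        return None

# Later lookups guard against None the same way the CSV code does:
empty_record = ","
json_device = fetch_json("https://example.invalid/device", lambda url: b"")
device_field = (
    empty_record
    if not json_device or "productDisplayName" not in json_device
    else json_device["productDisplayName"] + ","
)
print(repr(device_field))  # -> ','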
empty_record = "," @@ -496,7 +504,7 @@ def http_req(url, post=None, headers=None): csv_record += ( empty_record - if "activityName" not in a + if "activityName" not in a or not a["activityName"] else '"' + a["activityName"].replace('"', '""') + '",' ) @@ -628,7 +636,7 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record - if "productDisplayName" not in JSON_DEVICE + if not JSON_DEVICE or "productDisplayName" not in JSON_DEVICE else JSON_DEVICE["productDisplayName"] + " " + JSON_DEVICE["versionString"] @@ -691,7 +699,7 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record - if "metricsCount" + if not JSON_DETAIL or "metricsCount" not in JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"] else str( JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"][ From 9584e9d335601ace42b2011f6e95c6e162fef3ec Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sat, 8 Sep 2018 19:03:55 -0700 Subject: [PATCH 29/33] Make launching of external application for CSV and opt-in option and allow for passing in of application and arguments. Signed-off-by: Michael Payne --- gcexport3.py | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 960d463..669958c 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -44,14 +44,12 @@ PARSER.add_argument("--version", help="print version and exit", action="store_true") PARSER.add_argument( "--username", - help="your Garmin Connect username (otherwise, you will be \ - prompted)", + help="your Garmin Connect username (otherwise, you will be prompted)", nargs="?", ) PARSER.add_argument( "--password", - help="your Garmin Connect password (otherwise, you will be \ - prompted)", + help="your Garmin Connect password (otherwise, you will be prompted)", nargs="?", ) PARSER.add_argument( @@ -59,8 +57,21 @@ "--count", nargs="?", default="1", - help="number of recent activities to \ - download, or 'all' (default: 1)", + help="number of recent activities to download, or 'all' (default: 1)", +) +PARSER.add_argument( + "-e", + "--external", + nargs="?", + default="", + help="path to external program to pass CSV file too (default: )", +) +PARSER.add_argument( + "-a", + "--args", + nargs="?", + default="", + help="additional arguments to pass to external program (default: )", ) PARSER.add_argument( "-f", @@ -75,14 +86,12 @@ "--directory", nargs="?", default=ACTIVITIES_DIRECTORY, - help="the \ - directory to export to (default: './YYYY-MM-DD_garmin_connect_export')", + help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')", ) PARSER.add_argument( "-u", "--unzip", - help="if downloading ZIP files (format: 'original'), unzip \ - the file and removes the ZIP file", + help="if downloading ZIP files (format: 'original'), unzip the file and removes the ZIP file", action="store_true", ) @@ -737,9 +746,10 @@ def http_req(url, post=None, headers=None): CSV_FILE.close() -print("Open CSV output.") -print(CSV_FILENAME) -# open CSV file. Comment this line out if you don't want this behavior -call(["/usr/bin/libreoffice6.1", "--calc", CSV_FILENAME]) +if len(ARGS.external): + print("Open CSV output.") + print(CSV_FILENAME) + # open CSV file. 
Comment this line out if you don't want this behavior + call([ARGS.external, "--" + ARGS.args, CSV_FILENAME]) print("Done!") From fb946822d305e5dfd8b1ba9ae1dc4a050d19a716 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sun, 18 Nov 2018 17:02:09 -0800 Subject: [PATCH 30/33] Replace deprecated activity detail endpoint with new API and update JSON. Signed-off-by: Michael Payne --- gcexport3.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 89f792d..8093fbf 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -226,9 +226,6 @@ def http_req(url, post=None, headers=None): ) URL_GC_LIST = "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?" URL_GC_ACTIVITY = "https://connect.garmin.com/modern/proxy/activity-service/activity/" -URL_GC_ACTIVITY_DETAIL = ( - "https://connect.garmin.com/modern/proxy/activity-service-1.3/json/activityDetails/" -) URL_GC_GPX_ACTIVITY = ( "https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/" ) @@ -493,9 +490,16 @@ def http_req(url, post=None, headers=None): print("Retrieving Device Details failed.") JSON_DEVICE = None - print("Activity details URL: " + URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) + print( + "Activity details URL: " + + URL_GC_ACTIVITY + + str(a["activityId"]) + + "/details" + ) try: - ACTIVITY_DETAIL = http_req(URL_GC_ACTIVITY_DETAIL + str(a["activityId"])) + ACTIVITY_DETAIL = http_req( + URL_GC_ACTIVITY + str(a["activityId"]) + "/details" + ) write_to_file( ARGS.directory + "/" + str(a["activityId"]) + "_activity_detail.json", ACTIVITY_DETAIL.decode(), @@ -708,14 +712,8 @@ def http_req(url, post=None, headers=None): ) csv_record += ( empty_record - if not JSON_DETAIL or "metricsCount" - not in JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"] - else str( - JSON_DETAIL["com.garmin.activity.details.json.ActivityDetails"][ - "metricsCount" - ] - ) - + "," + if not JSON_DETAIL or "metricsCount" not in JSON_DETAIL + else str(JSON_DETAIL["metricsCount"]) + "," ) csv_record += "\n" From fadae4eb18e47c3402826bc66f420ae0c5ed74f1 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Thu, 3 Jan 2019 22:10:45 -0800 Subject: [PATCH 31/33] Add new URL to retrieve gear detail and create column for Bike name. Signed-off-by: Michael Payne --- gcexport3.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/gcexport3.py b/gcexport3.py index 8093fbf..70f2750 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -238,6 +238,9 @@ def http_req(url, post=None, headers=None): URL_DEVICE_DETAIL = ( "https://connect.garmin.com/modern/proxy/device-service/deviceservice/app-info/" ) +URL_GEAR_DETAIL = ( + "https://connect.garmin.com/modern/proxy/gear-service/gear/filterGear?" +) # Initially, we need to get a valid session cookie, so we pull the login page. 
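Besides switching to the .../activity/<id>/details endpoint, PATCH 30 relies on the flatter JSON that endpoint returns: metricsCount now sits at the top level instead of under the old com.garmin.activity.details.json.ActivityDetails wrapper. A small illustration of the two shapes, with invented values:

import json

old_detail = json.loads('{"com.garmin.activity.details.json.ActivityDetails": {"metricsCount": 42}}')
new_detail = json.loads('{"metricsCount": 42}')

# Old nested shape from the retired activity-service-1.3 endpoint.
print(old_detail["com.garmin.activity.details.json.ActivityDetails"]["metricsCount"])
# Flat shape returned by the .../activity/<id>/details endpoint.
print(new_detail["metricsCount"])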
print("Request login page") http_req(URL_GC_LOGIN) @@ -287,6 +290,7 @@ def http_req(url, post=None, headers=None): CSV_FILE.write( "Activity name,\ Description,\ +Bike,\ Begin timestamp,\ Duration (h:m:s),\ Moving duration (h:m:s),\ @@ -511,6 +515,27 @@ def http_req(url, post=None, headers=None): print("Retrieving Activity Details failed.") JSON_DETAIL = None + print( + "Gear details URL: " + + URL_GEAR_DETAIL + + "activityId=" + + str(a["activityId"]) + ) + try: + GEAR_DETAIL = http_req( + URL_GEAR_DETAIL + "activityId=" + str(a["activityId"]) + ) + write_to_file( + ARGS.directory + "/" + str(a["activityId"]) + "_gear_detail.json", + GEAR_DETAIL.decode(), + "a", + ) + JSON_GEAR = json.loads(GEAR_DETAIL) + # print(JSON_GEAR) + except: + print("Retrieving Gear Details failed.") + # JSON_GEAR = None + # Write stats to CSV. empty_record = "," csv_record = "" @@ -529,6 +554,12 @@ def http_req(url, post=None, headers=None): else: csv_record += empty_record + # Gear detail returned as an array so pick the first one + csv_record += ( + empty_record + if not JSON_GEAR or "customMakeModel" not in JSON_GEAR[0] + else JSON_GEAR[0]["customMakeModel"] + "," + ) csv_record += ( empty_record if "startTimeLocal" not in JSON_SUMMARY["summaryDTO"] From c381d39da7209bd4e7dd21f193513f5020148cb1 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Fri, 19 Apr 2019 19:23:34 -0700 Subject: [PATCH 32/33] Fix latest login change. Signed-off-by: Michael Payne --- gcexport3.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index 70f2750..fd46cad 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -187,8 +187,8 @@ def http_req(url, post=None, headers=None): LIMIT_MAXIMUM = 1000 WEBHOST = "https://connect.garmin.com" -REDIRECT = "https://connect.garmin.com/post-auth/login" -BASE_URL = "http://connect.garmin.com/en-US/signin" +REDIRECT = "https://connect.garmin.com/modern/" +BASE_URL = "https://connect.garmin.com/en-US/signin" SSO = "https://sso.garmin.com/sso" CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.2-min.css" @@ -207,18 +207,25 @@ def http_req(url, post=None, headers=None): "rememberMeChecked": "false", "createAccountShown": "true", "openCreateAccount": "false", - "usernameShown": "false", "displayNameShown": "false", "consumeServiceTicket": "false", "initialFocus": "true", "embedWidget": "false", - "generateExtraServiceTicket": "false", + "generateExtraServiceTicket": "true", + "generateTwoExtraServiceTickets": "false", + "generateNoServiceTicket": "false", + "globalOptInShown": "true", + "globalOptInChecked": "false", + "mobile": "false", + "connectLegalTerms": "true", + "locationPromptShown": "true", + "showPassword": "true", } print(urllib.parse.urlencode(DATA)) # URLs for various services. -URL_GC_LOGIN = "https://sso.garmin.com/sso/login?" + urllib.parse.urlencode(DATA) +URL_GC_LOGIN = "https://sso.garmin.com/sso/signin?" + urllib.parse.urlencode(DATA) URL_GC_POST_AUTH = "https://connect.garmin.com/modern/activities?" 
URL_GC_PROFILE = "https://connect.garmin.com/modern/profile" URL_GC_USERSTATS = ( @@ -251,14 +258,14 @@ def http_req(url, post=None, headers=None): POST_DATA = { "username": USERNAME, "password": PASSWORD, - "embed": "true", - "lt": "e1s1", - "_eventId": "submit", - "displayNameRequired": "false", + "embed": "false", + "rememberme": "on", } +HEADERS = {"referer": URL_GC_LOGIN} + print("Post login data") -LOGIN_RESPONSE = http_req(URL_GC_LOGIN, POST_DATA).decode() +LOGIN_RESPONSE = http_req(URL_GC_LOGIN + "#", POST_DATA, HEADERS).decode() print("Finish login post") # extract the ticket from the login response From 2ae255d38f1f51400f957d1b567ec859a0671489 Mon Sep 17 00:00:00 2001 From: Michael Payne Date: Sun, 10 May 2020 08:44:07 -0700 Subject: [PATCH 33/33] Latest Black formatting changes. Signed-off-by: Michael Payne --- gcexport3.py | 77 +++++++++++++++++----------------------------------- 1 file changed, 25 insertions(+), 52 deletions(-) diff --git a/gcexport3.py b/gcexport3.py index fd46cad..582ee1e 100755 --- a/gcexport3.py +++ b/gcexport3.py @@ -91,7 +91,10 @@ PARSER.add_argument( "-u", "--unzip", - help="if downloading ZIP files (format: 'original'), unzip the file and removes the ZIP file", + help=( + "if downloading ZIP files (format: 'original'), unzip the file and removes the" + " ZIP file" + ), action="store_true", ) @@ -108,7 +111,7 @@ def hhmmss_from_seconds(sec): """Helper function that converts seconds to HH:MM:SS time format.""" - if isinstance(sec, (float)): + if isinstance(sec, float): formatted_time = str(timedelta(seconds=int(sec))).zfill(8) else: formatted_time = "0.000" @@ -146,8 +149,8 @@ def http_req(url, post=None, headers=None): # Tell Garmin we're some supported browser. request.add_header( "User-Agent", - "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, \ - like Gecko) Chrome/54.0.2816.0 Safari/537.36", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko)" + " Chrome/54.0.2816.0 Safari/537.36", ) if headers: for header_key, header_value in headers.items(): @@ -157,7 +160,7 @@ def http_req(url, post=None, headers=None): post = post.encode("utf-8") # Convert dictionary to POST parameter string. # print("request.headers: " + str(request.headers) + " COOKIE_JAR: " + str(COOKIE_JAR)) # print("post: " + str(post) + "request: " + str(request)) - response = OPENER.open((request), data=post) + response = OPENER.open(request, data=post) if response.getcode() == 204: # For activities without GPS coordinates, there is no GPX download (204 = no content). @@ -176,8 +179,8 @@ def http_req(url, post=None, headers=None): # Create directory for data files. if isdir(ARGS.directory): print( - "Warning: Output directory already exists. Will skip already-downloaded files and \ -append to the CSV file." + "Warning: Output directory already exists. Will skip already-downloaded files" + " and append to the CSV file." ) USERNAME = ARGS.username if ARGS.username else input("Username: ") @@ -273,8 +276,8 @@ def http_req(url, post=None, headers=None): MATCH = PATTERN.match(LOGIN_RESPONSE) if not MATCH: raise Exception( - "Did not get a ticket in the login response. Cannot log in. Did \ -you enter the correct username and password?" + "Did not get a ticket in the login response. Cannot log in. Did you enter the" + " correct username and password?" 
) LOGIN_TICKET = MATCH.group(1) print("Login ticket=" + LOGIN_TICKET) @@ -295,47 +298,17 @@ def http_req(url, post=None, headers=None): # Write header to CSV file if not CSV_EXISTED: CSV_FILE.write( - "Activity name,\ -Description,\ -Bike,\ -Begin timestamp,\ -Duration (h:m:s),\ -Moving duration (h:m:s),\ -Distance (km),\ -Average speed (km/h),\ -Average moving speed (km/h),\ -Max. speed (km/h),\ -Elevation loss uncorrected (m),\ -Elevation gain uncorrected (m),\ -Elevation min. uncorrected (m),\ -Elevation max. uncorrected (m),\ -Min. heart rate (bpm),\ -Max. heart rate (bpm),\ -Average heart rate (bpm),\ -Calories,\ -Avg. cadence (rpm),\ -Max. cadence (rpm),\ -Strokes,\ -Avg. temp (°C),\ -Min. temp (°C),\ -Max. temp (°C),\ -Map,\ -End timestamp,\ -Begin timestamp (ms),\ -End timestamp (ms),\ -Device,\ -Activity type,\ -Event type,\ -Time zone,\ -Begin latitude (°DD),\ -Begin longitude (°DD),\ -End latitude (°DD),\ -End longitude (°DD),\ -Elevation gain corrected (m),\ -Elevation loss corrected (m),\ -Elevation max. corrected (m),\ -Elevation min. corrected (m),\ -Sample count\n" + "Activity name,Description,Bike,Begin timestamp,Duration (h:m:s),Moving" + " duration (h:m:s),Distance (km),Average speed (km/h),Average moving speed" + " (km/h),Max. speed (km/h),Elevation loss uncorrected (m),Elevation gain" + " uncorrected (m),Elevation min. uncorrected (m),Elevation max. uncorrected" + " (m),Min. heart rate (bpm),Max. heart rate (bpm),Average heart rate" + " (bpm),Calories,Avg. cadence (rpm),Max. cadence (rpm),Strokes,Avg. temp" + " (°C),Min. temp (°C),Max. temp (°C),Map,End timestamp,Begin timestamp (ms),End" + " timestamp (ms),Device,Activity type,Event type,Time zone,Begin latitude" + " (°DD),Begin longitude (°DD),End latitude (°DD),End longitude (°DD),Elevation" + " gain corrected (m),Elevation loss corrected (m),Elevation max. corrected" + " (m),Elevation min. corrected (m),Sample count\n" ) DOWNLOAD_ALL = False @@ -446,8 +419,8 @@ def http_req(url, post=None, headers=None): # format if you want actual data in every file, as I believe Garmin provides a GPX # file for every activity. print( - "Writing empty file since Garmin did not generate a TCX file for this \ -activity...", + "Writing empty file since Garmin did not generate a TCX file for" + " this activity...", end=" ", ) data = ""
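For readers following the login fix in patch 32, the flow the script now performs is a four-step handshake: fetch the signin page to seed the cookie jar, post the credentials with a referer header, scrape the service ticket out of the HTML response, and redeem the ticket for an authenticated session. A condensed sketch of that sequence; garmin_login is illustrative only, the ticket regex below is a guess rather than the script's actual PATTERN, and the final redemption against URL_GC_POST_AUTH is an assumption based on the URL constants defined earlier.

import re

def garmin_login(username, password):
    # Illustrative condensation (not in the script) of the inline login code.
    http_req(URL_GC_LOGIN)  # GET the signin page first so the cookies are set
    response = http_req(
        URL_GC_LOGIN + "#",
        {"username": username, "password": password, "embed": "false", "rememberme": "on"},
        {"referer": URL_GC_LOGIN},
    ).decode()
    # Guessed pattern for illustration; the script defines its own PATTERN.
    match = re.search(r"ticket=([-\w]+)", response)
    if not match:
        raise Exception("Did not get a ticket in the login response.")
    # Assumption: the ticket is redeemed against URL_GC_POST_AUTH to obtain
    # an authenticated Garmin Connect session.
    http_req(URL_GC_POST_AUTH + "ticket=" + match.group(1))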