diff --git a/.gitignore b/.gitignore
index 5dc43a8..10c4361 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
-
+[0-9\-]*_garmin_connect_export
+.*.swp
 .DS_Store
 extras/
+*.pyc
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..4920340
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,30 @@
+SHELL := /bin/bash
+COUNT := 4
+# DEBUG = --debug
+DEBUG =
+.PHONY: help
+help:
+	@echo Usage:
+	@echo make go COUNT=\<n\>
+
+.PHONY: verify_overlap
+# the new activity file should overlap the old one (otherwise vimdiff
+# sometimes seems confused).
+verify_overlap:
+	@grep -q $(shell tail -1 $(shell find . -name activities.csv) | cut --delimiter=, --fields=1) ../garmin_running/activities.csv
+
+.PHONY: go
+go:
+	./gcdownload.py --username aaronferrucci --count $(COUNT) $(DEBUG)
+
+NUM_ACTIVITIES = $(shell find . -name activities.csv | wc -l)
+.PHONY: count_activities_csv
+count_activities_csv:
+	@if [ $(NUM_ACTIVITIES) -ne 1 ] ; then \
+		echo "Too many activities.csv files found ($(NUM_ACTIVITIES))"; \
+		false; \
+	fi
+
+.PHONY: vimdiff
+vimdiff: verify_overlap
+	gvimdiff ../garmin_running/activities.csv $(shell find . -name activities.csv)
diff --git a/cmdlineargs.py b/cmdlineargs.py
new file mode 100644
index 0000000..aa88997
--- /dev/null
+++ b/cmdlineargs.py
@@ -0,0 +1,53 @@
+import argparse
+from datetime import datetime
+def get_args():
+    current_date = datetime.now().strftime('%Y-%m-%d')
+    activities_directory = './' + current_date + '_garmin_connect_export'
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        '--quiet',
+        help="stifle all output",
+        action="store_true"
+    )
+    parser.add_argument(
+        '--debug',
+        help="lots of console output",
+        action="store_true"
+    )
+    parser.add_argument(
+        '--version',
+        help="print version and exit",
+        action="store_true"
+    )
+    parser.add_argument(
+        '--username',
+        help="your Garmin Connect username (otherwise, you will be prompted)",
+        nargs='?'
+    )
+    parser.add_argument(
+        '--password',
+        help="your Garmin Connect password (otherwise, you will be prompted)",
+        nargs='?'
+    )
+
+    parser.add_argument(
+        '-c',
+        '--count',
+        nargs='?',
+        default="1",
+        help="number of recent activities to download (default: 1)"
+    )
+
+    parser.add_argument(
+        '-d',
+        '--directory',
+        nargs='?',
+        default=activities_directory,
+        help="save directory (default: './YYYY-MM-DD_garmin_connect_export')"
+    )
+
+    args = parser.parse_args()
+    return args
+
+
diff --git a/deviceinfo.py b/deviceinfo.py
new file mode 100644
index 0000000..f8105ab
--- /dev/null
+++ b/deviceinfo.py
@@ -0,0 +1,55 @@
+import json
+class DeviceInfo():
+    devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices"
+    keys = [
+        'currentFirmwareVersion',
+        'displayName',
+        'partNumber',
+        'serialNumber',
+    ]
+
+    def __init__(self, session):
+        self.session = session
+        http_data = session.get(DeviceInfo.devices_url, allow_redirects=False)
+        if http_data.status_code != 200:
+            print("DeviceInfo error code: %d" % (http_data.status_code))
+            self.device_info = {}
+            return
+
+        devices = json.loads(http_data.text)
+        self.device_info = {}
+        for dev in devices:
+            dev_id = dev['deviceId']
+            this_device = {}
+            for key in DeviceInfo.keys:
+                this_device[key] = dev.get(key, None)
+            self.device_info[dev_id] = this_device
+
+        # backward compatibility hack: prepend ' ', append ".0.0"
+        # to firmware version.
+        for dev_id in self.device_info:
+            fw = self.device_info[dev_id]['currentFirmwareVersion']
+            fw = ' ' + fw + ".0.0"
+            self.device_info[dev_id]['currentFirmwareVersion'] = fw
+
+    def do_print(self):
+        for dev_id in self.device_info:
+            print(dev_id)
+            for dev_parameter in self.device_info[dev_id]:
+                print(" " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter])
+
+    def displayName(self, deviceId):
+        try:
+            device = self.device_info[deviceId]['displayName']
+        except KeyError:
+            device = ""
+
+        try:
+            version = self.device_info[deviceId]['currentFirmwareVersion']
+        except KeyError:
+            version = ""
+
+        displayName = device + ' ' + version
+        return displayName
+
+
diff --git a/gcdownload.py b/gcdownload.py
new file mode 100755
index 0000000..35efdb3
--- /dev/null
+++ b/gcdownload.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python3
+import json
+
+from gclogin import GarminLogin
+from properties import Properties
+from deviceinfo import DeviceInfo
+from typeinfo import TypeInfo
+from cmdlineargs import get_args
+from utils import csvFormat, dictFind, activity_to_csv
+from sys import argv
+from os.path import isdir
+from os.path import isfile
+from os import mkdir
+from getpass import getpass
+
+script_version = '1.4.0'
+args = get_args()
+if args.version:
+    print(argv[0] + ", version " + script_version)
+    exit(0)
+
+# utilities - put these somewhere else?
+
+gcl = GarminLogin()
+username = args.username if args.username else input('Username: ')
+password = args.password if args.password else getpass()
+session = gcl._get_session(email=username, password=password)
+
+devInfo = DeviceInfo(session)
+# print("\nDevices:")
+# devInfo.do_print()
+
+# activity properties
+activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0'
+activity_properties = Properties(session, activity_properties_url, b'activity_type_')
+# print("\nActivity Properties:")
+# activity_properties.do_print()
+
+activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes"
+activity_type_info = TypeInfo(session, activity_type_url, activity_properties)
+# print("\nActivity Type Info:")
+# activity_type_info.do_print()
+
+event_properties_url = 'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0'
+event_properties = Properties(session, event_properties_url)
+# print("\nEvent Properties:")
+# event_properties.do_print()
+
+event_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes'
+event_type_info = TypeInfo(session, event_type_url, event_properties)
+# print("\nEvent Type Info:")
+# event_type_info.do_print()
+
+if not isdir(args.directory):
+    mkdir(args.directory)
+
+
+csv_filename = args.directory + '/activities.csv'
+csv_existed = isfile(csv_filename)
+
+csv_file = open(csv_filename, 'a')
+
+# Write header to CSV file
+if not csv_existed:
+    csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n')
+
+# start of experimental get-activities code
+total_to_download = int(args.count)
+total_downloaded = 0
+url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?'
+url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/'
+while total_downloaded < total_to_download:
+    # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100.
+    if total_to_download - total_downloaded > 100:
+        num_to_download = 100
+    else:
+        num_to_download = total_to_download - total_downloaded
+
+    search_params = {'start': total_downloaded, 'limit': num_to_download}
+    http_data = session.get(url_gc_search, params=search_params)
+    if http_data.status_code != 200:
+        print("Activity load error code: %d" % (http_data.status_code))
+
+    activities = json.loads(http_data.text)
+
+    # print "### activities:"
+    # print json.dumps(activities, indent=4, sort_keys=True)
+    # print "###"
+
+    for a in activities:
+        activityId = str(a['activityId'])
+
+        if not args.quiet:
+            print('activity: [' + activityId + ']')
+            print(a['activityName'])
+        modern_activity_url = url_gc_modern_activity + activityId
+        if args.debug:
+            print("url: " + modern_activity_url)
+
+        result = session.get(modern_activity_url)
+        results = json.loads(result.text)
+
+        activity_filename = args.directory + '/' + activityId + '.json'
+        if args.debug:
+            print("filename: " + activity_filename)
+
+        save_file = open(activity_filename, 'w')
+        save_file.write(json.dumps(results, indent=4, sort_keys=True))
+        save_file.close()
+
+        # Write stats to CSV.
+        csv_record = activity_to_csv(results, a, devInfo, activity_type_info, event_type_info)
+        if args.debug:
+            print("data: " + csv_record)
+
+        # csv_file.write(csv_record.encode('utf8'))
+        csv_file.write(csv_record)
+
+    total_downloaded += num_to_download
+# End while loop for multiple chunks.
+
+csv_file.close()
+
+if not args.quiet:
+    print('Done!')
+
diff --git a/gcexport.py b/gcexport.py
index 2c6fec7..4f3376e 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -2,14 +2,13 @@
 """
 File: gcexport.py
-Author: Kyle Krafka (https://github.com/kjkjava/)
-Date: April 28, 2015
-
-Description: Use this script to export your fitness data from Garmin Connect.
-	See README.md for more information.
+Original author: Kyle Krafka (https://github.com/kjkjava/)
+Description: Use this script to export your fitness data from Garmin's servers.
+	See README.md for more information.
""" from urllib import urlencode +import urllib2, cookielib, json from datetime import datetime from getpass import getpass from sys import argv @@ -17,69 +16,179 @@ from os.path import isfile from os import mkdir from os import remove -from xml.dom.minidom import parseString - -import urllib2, cookielib, json -from fileinput import filename import argparse -import zipfile -script_version = '1.0.0' +class DeviceInfo(): + devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" + keys = [ + 'currentFirmwareVersion', + 'displayName', + 'partNumber', + 'serialNumber', + ] + + def __init__(self): + self.device_info = {} + devices = json.loads(http_req(self.devices_url)) + for dev in devices: + dev_id = dev['deviceId'] + this_device = {} + for key in self.keys: + this_device[key] = dictFind(dev, [key, ]) + self.device_info[dev_id] = this_device + + # backward compatibility hack: prepend ' ', append ".0.0" + # to firmware version. + for dev_id in self.device_info: + fw = self.device_info[dev_id]['currentFirmwareVersion'] + fw = ' ' + fw + ".0.0" + self.device_info[dev_id]['currentFirmwareVersion'] = fw + + def printit(self): + for dev_id in self.device_info: + print dev_id + for dev_parameter in self.device_info[dev_id]: + print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] + + def displayName(self, deviceId): + try: + device = self.device_info[deviceId]['displayName'] + except KeyError: + device = "" + + try: + version = self.device_info[deviceId]['currentFirmwareVersion'] + except KeyError: + version = "" + + displayName = device + ' ' + version + return displayName + +class Properties(): + """Properties: utility class that stores data from a URL in a dict. Values + in the dict are accessed by get(), which provides a default value. + + Data from the URL are expected be in string form, with multiple lines + in key=value format. + + Keys may be decorated with a to-be-removed prefix + """ + def __init__(self, url, key_trim_prefix = None): + self.key_trim_prefix = key_trim_prefix + + self.properties = {} + http_data = http_req(url) + http_lines = http_data.splitlines() + for line in http_lines: + (key, value) = line.split('=') + if (key_trim_prefix != None): + key = key.replace("activity_type_", "") + self.properties[key] = value + + # Get a value, default to key as value + def get(self, key): + try: + value = self.properties[key] + except KeyError: + value = key + return value + +script_version = '1.3.2' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' parser = argparse.ArgumentParser() -# TODO: Implement verbose and/or quiet options. 
-# parser.add_argument('-v', '--verbose', help="increase output verbosity", action="store_true")
-parser.add_argument('--version', help="print version and exit", action="store_true")
-parser.add_argument('--username', help="your Garmin Connect username (otherwise, you will be prompted)", nargs='?')
-parser.add_argument('--password', help="your Garmin Connect password (otherwise, you will be prompted)", nargs='?')
-
-parser.add_argument('-c', '--count', nargs='?', default="1",
-	help="number of recent activities to download, or 'all' (default: 1)")
-
-parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx",
-	help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')")
-
-parser.add_argument('-d', '--directory', nargs='?', default=activities_directory,
-	help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')")
-
-parser.add_argument('-u', '--unzip',
-	help="if downloading ZIP files (format: 'original'), unzip the file and removes the ZIP file",
-	action="store_true")
+parser.add_argument(
+    '--quiet',
+    help="stifle all output",
+    action="store_true"
+)
+parser.add_argument(
+    '--debug',
+    help="lots of console output",
+    action="store_true"
+)
+parser.add_argument(
+    '--version',
+    help="print version and exit",
+    action="store_true"
+)
+parser.add_argument(
+    '--username',
+    help="your Garmin Connect username (otherwise, you will be prompted)",
+    nargs='?'
+)
+parser.add_argument(
+    '--password',
+    help="your Garmin Connect password (otherwise, you will be prompted)",
+    nargs='?'
+)
+
+parser.add_argument(
+    '-c',
+    '--count',
+    nargs='?',
+    default="1",
+    help="number of recent activities to download (default: 1)"
+)
+
+parser.add_argument(
+    '-d',
+    '--directory',
+    nargs='?',
+    default=activities_directory,
+    help="save directory (default: './YYYY-MM-DD_garmin_connect_export')"
+)
 
 args = parser.parse_args()
 
 if args.version:
-	print argv[0] + ", version " + script_version
-	exit(0)
+    print argv[0] + ", version " + script_version
+    exit(0)
 
 cookie_jar = cookielib.CookieJar()
 opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
 
+def csvFormat(value):
+    csv_record = '"' + str(value).replace('"', '""') + '",'
+    return csv_record
+
+def dictFind(data, keys):
+    try:
+        for key in keys:
+            data = data[key]
+    except KeyError:
+        return ""
+    return data
+
 # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers.
 def http_req(url, post=None, headers={}):
-	request = urllib2.Request(url)
-	request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36')  # Tell Garmin we're some supported browser.
-	for header_key, header_value in headers.iteritems():
-		request.add_header(header_key, header_value)
-	if post:
-		post = urlencode(post)  # Convert dictionary to POST parameter string.
-	response = opener.open(request, data=post)  # This line may throw a urllib2.HTTPError.
+    if args.debug:
+        print "### http_req(" + url + ")"
+
+    request = urllib2.Request(url)
+    request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36')  # Tell Garmin we're some supported browser.
+    for header_key, header_value in headers.iteritems():
+        request.add_header(header_key, header_value)
+    if post:
+        post = urlencode(post)  # Convert dictionary to POST parameter string.
+    response = opener.open(request, data=post)  # This line may throw a urllib2.HTTPError.
+
+    # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below.
+    if response.getcode() != 200:
+        raise Exception('Bad return code (' + response.getcode() + ') for: ' + url)
 
-	# N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below.
-	if response.getcode() != 200:
-		raise Exception('Bad return code (' + response.getcode() + ') for: ' + url)
+    return response.read()
 
-	return response.read()
 
-print 'Welcome to Garmin Connect Exporter!'
+if not args.quiet:
+    print 'Welcome to Garmin Connect Exporter!'
 
 # Create directory for data files.
 if isdir(args.directory):
-	print 'Warning: Output directory already exists. Will skip already-downloaded files and append to the CSV file.'
+    print 'Warning: Output directory already exists. Will skip already-downloaded files and append to the CSV file.'
 
 username = args.username if args.username else raw_input('Username: ')
 password = args.password if args.password else getpass()
@@ -88,39 +197,116 @@ def http_req(url, post=None, headers={}):
 limit_maximum = 100
 
 # URLs for various services.
-url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false'
-url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?'
-url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?'
+url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false'
+url_gc_post_auth = 'https://connect.garmin.com/modern/?'
+url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?'
 url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/'
 url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/'
 url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/'
+url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/'
 
 # Initially, we need to get a valid session cookie, so we pull the login page.
 http_req(url_gc_login)
 
-# Now we'll actually login.
-post_data = {'username': username, 'password': password, 'embed': 'true', 'lt': 'e1s1', '_eventId': 'submit', 'displayNameRequired': 'false'}  # Fields that are passed in a typical Garmin login.
+# Now we'll actually login, using fields that are passed in a typical
+# Garmin login.
+post_data = {
+    'username': username,
+    'password': password,
+    'embed': 'true',
+    'lt': 'e1s1',
+    '_eventId': 'submit',
+    'displayNameRequired': 'false'}
 http_req(url_gc_login, post_data)  # Get the key.
 
 # TODO: Can we do this without iterating?
 login_ticket = None
 for cookie in cookie_jar:
-	if cookie.name == 'CASTGC':
-		login_ticket = cookie.value
-		break
+    if args.debug:
+        print "### cookie.name: " + cookie.name
+    if cookie.name == 'CASTGC':
+        login_ticket = cookie.value
+        if args.debug:
+            print "### selected login_ticket: " + login_ticket
+        break
 
 if not login_ticket:
-	raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?')
+    raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?')
 
-# Chop of 'TGT-' off the beginning, prepend 'ST-0'.
+# Chop 'TGT-' off the beginning, prepend 'ST-0'.
 login_ticket = 'ST-0' + login_ticket[4:]
+if args.debug:
+    print "### modified login_ticket: " + login_ticket
 
-http_req(url_gc_post_auth + 'ticket=' + login_ticket)
+login_url = url_gc_post_auth + 'ticket=' + login_ticket
+http_req(login_url)  # We should be logged in now.
+
+deviceInfo = DeviceInfo()
+
+# get activity properties
+# This maps cryptic activity typeKeys to display names
+#   all:: All Activities
+#   golf:: Golf
+#   indoor_cycling:: Indoor Cycling
+#   ...
+#   street_running:: Street Running
+#
+# keys appear in activity records, activityType/typeKey
+activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0'
+activity_properties = Properties(activity_properties_url, "activity_type_")
+
+# get activity type info, put in a dict
+activity_type_info = {}
+activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes"
+activity_types = json.loads(http_req(activity_type_url))
+keys = ['typeKey', ]
+for a_type in activity_types:
+    type_id = a_type['typeId']
+    this_type = {}
+    for key in keys:
+        this_type[key] = dictFind(a_type, [key, ])
+    # Set type from typeKey
+    this_type['type'] = activity_properties.get(this_type['typeKey'])
+    activity_type_info[type_id] = this_type
+
+if args.debug:
+    print "### activity_type_info"
+    for a_type in activity_type_info:
+        print a_type
+        for activity_parameter in activity_type_info[a_type]:
+            print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter])
+    print "###"
+
+event_properties_url = 'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0'
+event_properties = Properties(event_properties_url)
+
+# get event type info, put in a dict
+event_type_info = {}
+event_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes'
+event_types = json.loads(http_req(event_type_url))
+keys = ['typeKey', ]
+for e_type in event_types:
+    type_id = e_type['typeId']
+    this_type = {}
+    for key in keys:
+        this_type[key] = dictFind(e_type, [key, ])
+    # Set type from typeKey
+    this_type['type'] = event_properties.get(this_type['typeKey'])
+    event_type_info[type_id] = this_type
+
+if args.debug:
+    print "### event_type_info"
+    for e_type in event_type_info:
+        print e_type
+        for event_parameter in event_type_info[e_type]:
+            print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter])
+    print "###"
+
 if not isdir(args.directory):
-	mkdir(args.directory)
+    mkdir(args.directory)
 
 csv_filename = args.directory + '/activities.csv'
 csv_existed = isfile(csv_filename)
@@ -129,188 +315,179 @@ def http_req(url, post=None, headers={}):
 # Write header to CSV file
 if not csv_existed:
-	csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n')
-
-download_all = False
-if args.count == 'all':
-	# If the user wants to download all activities, first download one,
-	# then the result of that request will tell us how many are available
-	# so we will modify the variables then.
-	total_to_download = 1
-	download_all = True
-else:
-	total_to_download = int(args.count)
+    csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n')
+
+total_to_download = int(args.count)
 total_downloaded = 0
 
 # This while loop will download data from the server in multiple chunks, if necessary.
 while total_downloaded < total_to_download:
-	# Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100.
-	if total_to_download - total_downloaded > 100:
-		num_to_download = 100
-	else:
-		num_to_download = total_to_download - total_downloaded
-
-	search_params = {'start': total_downloaded, 'limit': num_to_download}
-	# Query Garmin Connect
-	result = http_req(url_gc_search + urlencode(search_params))
-	json_results = json.loads(result)  # TODO: Catch possible exceptions here.
-
-
-	search = json_results['results']['search']
-
-	if download_all:
-		# Modify total_to_download based on how many activities the server reports.
-		total_to_download = int(search['totalFound'])
-		# Do it only once.
-		download_all = False
-
-	# Pull out just the list of activities.
-	activities = json_results['results']['activities']
-
-	# Process each activity.
-	for a in activities:
-		# Display which entry we're working on.
-		print 'Garmin Connect activity: [' + a['activity']['activityId'] + ']',
-		print a['activity']['activityName']['value']
-		print '\t' + a['activity']['beginTimestamp']['display'] + ',',
-		if 'sumElapsedDuration' in a['activity']:
-			print a['activity']['sumElapsedDuration']['display'] + ',',
-		else:
-			print '??:??:??,',
-		if 'sumDistance' in a['activity']:
-			print a['activity']['sumDistance']['withUnit']
-		else:
-			print '0.00 Miles'
-
-		if args.format == 'gpx':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.gpx'
-			download_url = url_gc_gpx_activity + a['activity']['activityId'] + '?full=true'
-			file_mode = 'w'
-		elif args.format == 'tcx':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.tcx'
-			download_url = url_gc_tcx_activity + a['activity']['activityId'] + '?full=true'
-			file_mode = 'w'
-		elif args.format == 'original':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.zip'
-			fit_filename = args.directory + '/' + a['activity']['activityId'] + '.fit'
-			download_url = url_gc_original_activity + a['activity']['activityId']
-			file_mode = 'wb'
-		else:
-			raise Exception('Unrecognized format.')
-
-		if isfile(data_filename):
-			print '\tData file already exists; skipping...'
-			continue
-		if args.format == 'original' and isfile(fit_filename):  # Regardless of unzip setting, don't redownload if the ZIP or FIT file exists.
-			print '\tFIT data file already exists; skipping...'
-			continue
-
-		# Download the data file from Garmin Connect.
-		# If the download fails (e.g., due to timeout), this script will die, but nothing
-		# will have been written to disk about this activity, so just running it again
-		# should pick up where it left off.
-		print '\tDownloading file...',
-
-		try:
-			data = http_req(download_url)
-		except urllib2.HTTPError as e:
-			# Handle expected (though unfortunate) error codes; die on unexpected ones.
-			if e.code == 500 and args.format == 'tcx':
-				# Garmin will give an internal server error (HTTP 500) when downloading TCX files if the original was a manual GPX upload.
-				# Writing an empty file prevents this file from being redownloaded, similar to the way GPX files are saved even when there are no tracks.
-				# One could be generated here, but that's a bit much. Use the GPX format if you want actual data in every file,
-				# as I believe Garmin provides a GPX file for every activity.
-				print 'Writing empty file since Garmin did not generate a TCX file for this activity...',
-				data = ''
-			elif e.code == 404 and args.format == 'original':
-				# For manual activities (i.e., entered in online without a file upload), there is no original file.
-				# Write an empty file to prevent redownloading it.
-				print 'Writing empty file since there was no original activity data...',
-				data = ''
-			else:
-				raise Exception('Failed. Got an unexpected HTTP error (' + str(e.code) + ').')
-
-		save_file = open(data_filename, file_mode)
-		save_file.write(data)
-		save_file.close()
-
-		# Write stats to CSV.
-		empty_record = '"",'
-
-		csv_record = ''
-
-		csv_record += empty_record if 'activityId' not in a['activity'] else '"' + a['activity']['activityId'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityName' not in a['activity'] else '"' + a['activity']['activityName']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + a['activity']['activityDescription']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",'
-		csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['millis'].replace('"', '""') + '",'
-		csv_record += empty_record if 'device' not in a['activity'] else '"' + a['activity']['device']['display'].replace('"', '""') + ' ' + a['activity']['device']['version'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['parent']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'eventType' not in a['activity'] else '"' + a['activity']['eventType']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityTimeZone' not in a['activity'] else '"' + a['activity']['activityTimeZone']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'beginLatitude' not in a['activity'] else '"' + a['activity']['beginLatitude']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'beginLongitude' not in a['activity'] else '"' + a['activity']['beginLongitude']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'endLatitude' not in a['activity'] else '"' + a['activity']['endLatitude']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'endLongitude' not in a['activity'] else '"' + a['activity']['endLongitude']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['display'].replace('"', '""') + '",'  # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile"
-		csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['display'].replace('"', '""') + '",'  # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile"
-		csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'minHeartRate' not in a['activity'] else '"' + a['activity']['minHeartRate']['display'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['withUnit'].replace('"', '""') + '",'
-		csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['value'].replace('"', '""') + '"'
-		csv_record += '\n'
-
-		csv_file.write(csv_record.encode('utf8'))
-
-		if args.format == 'gpx':
-			# Validate GPX data. If we have an activity without GPS data (e.g., running on a treadmill),
-			# Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data.
-			# N.B. You can omit the XML parse (and the associated log messages) to speed things up.
-			gpx = parseString(data)
-			gpx_data_exists = len(gpx.getElementsByTagName('trkpt')) > 0
-
-			if gpx_data_exists:
-				print 'Done. GPX data saved.'
-			else:
-				print 'Done. No track points found.'
-		elif args.format == 'original':
-			if args.unzip and data_filename[-3:].lower() == 'zip':  # Even manual upload of a GPX file is zipped, but we'll validate the extension.
-				print "Unzipping and removing original files...",
-				zip_file = open(data_filename, 'rb')
-				z = zipfile.ZipFile(zip_file)
-				for name in z.namelist():
-					z.extract(name, args.directory)
-				zip_file.close()
-				remove(data_filename)
-			print 'Done.'
-		else:
-			# TODO: Consider validating other formats.
-			print 'Done.'
-	total_downloaded += num_to_download
+    # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100.
+    if total_to_download - total_downloaded > 100:
+        num_to_download = 100
+    else:
+        num_to_download = total_to_download - total_downloaded
+
+    search_params = {'start': total_downloaded, 'limit': num_to_download}
+    # Query Garmin Connect
+    query_url = url_gc_search + urlencode(search_params)
+    result = http_req(query_url)
+    json_results = json.loads(result)  # TODO: Catch possible exceptions here.
+
+    if args.debug:
+        print "### json_results:"
+        print json.dumps(json_results, indent=4, sort_keys=True)
+        print "###"
+
+    # Pull out just the list of activities.
+    # Only the activityId is used.
+    # json_results used to be a deep hierarchy, but ... no longer
+    activities = json_results
+
+    # Process each activity.
+    for a in activities:
+        activityId = str(a['activityId'])
+
+        if not args.quiet:
+            print 'activity: [' + activityId + ']',
+            print a['activityName']
+        modern_activity_url = url_gc_modern_activity + activityId
+
+        if args.debug:
+            print "url: " + modern_activity_url
+
+        activity_filename = args.directory + '/' + activityId + '.json'
+        if args.debug:
+            print "filename: " + activity_filename
+        result = http_req(modern_activity_url)
+        results = json.loads(result)
+
+        save_file = open(activity_filename, 'w')
+        save_file.write(json.dumps(results, indent=4, sort_keys=True))
+        save_file.close()
+
+        # Write stats to CSV.
+        empty_record = '"",'
+        csv_record = ''
+        # Activity ID
+        csv_record += csvFormat(activityId)
+        # Activity Name
+        csv_record += csvFormat(dictFind(results, ['activityName', ]))
+        # Description
+        csv_record += csvFormat(dictFind(results, ['description', ]))
+        # Begin Timestamp
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ]))
+
+        # Begin Timestamp (Raw Milliseconds)
+        csv_record += empty_record
+
+        # End Timestamp
+        csv_record += empty_record
+
+        # End Timestamp (Raw Milliseconds)
+        csv_record += empty_record
+
+        # Device
+        deviceId = dictFind(a, ['deviceId', ])
+        csv_record += csvFormat(deviceInfo.displayName(deviceId))
+
+        # Activity Parent
+        parentTypeId = dictFind(a, ['activityType', 'parentTypeId',])
+        csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ]))
+        # Activity Type
+        typeId = dictFind(a, ['activityType', 'typeId',])
+        csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ]))
+
+        # Event Type
+        typeId = dictFind(a, ['eventType', 'typeId',])
+        csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ]))
+        # Activity Time Zone
+        csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ]))
+
+        # Max. Elevation
+        csv_record += empty_record
+        # Max. Elevation (Raw)
+        # (was in feet previously, now appears to be meters)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ]))
+
+        # {start, end} X {latitude, longitude}
+        # Begin Latitude (Decimal Degrees Raw)
+        # Begin Longitude (Decimal Degrees Raw)
+        # End Latitude (Decimal Degrees Raw)
+        # End Longitude (Decimal Degrees Raw)
+        for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']:
+            csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ]))
+
+        # Average Moving Speed
+        csv_record += empty_record
+
+        # Average Moving Speed (Raw)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ]))
+
+        # Max. Heart Rate (bpm)
+        csv_record += empty_record
+        # Average Heart Rate (bpm)
+        csv_record += empty_record
+
+        # Max. Speed
+        csv_record += empty_record
+        # Max. Speed (Raw)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ]))
+
+        # Calories
+        csv_record += empty_record
+        # Calories (Raw)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ]))
+
+        # Duration (h:m:s)
+        csv_record += empty_record
+        # Duration (Raw Seconds)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ]))
+        # Moving Duration (h:m:s)
+        csv_record += empty_record
+        # Moving Duration (Raw Seconds)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ]))
+        # Average Speed
+        csv_record += empty_record
+        # Average Speed (Raw)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ]))
+        # Distance
+        csv_record += empty_record
+        # distance.value
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ]))
+
+        # Max. Heart Rate (bpm)
+        csv_record += empty_record
+
+        # Min. Elevation
+        csv_record += empty_record
+        # Min. Elevation (Raw)
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ]))
+
+        # Elevation Gain
+        csv_record += empty_record
+        # Elevation Gain (Raw)
+        csv_record += empty_record
+        # Elevation Loss
+        csv_record += empty_record
+        # Elevation Loss (Raw)
+        csv_record += empty_record
+
+        # remove any trailing commas - R read.csv doesn't like them.
+        csv_record = csv_record.rstrip(',')
+
+        csv_record += '\n'
+
+        if args.debug:
+            print "data: " + csv_record
+
+        csv_file.write(csv_record.encode('utf8'))
+
+    total_downloaded += num_to_download
 # End while loop for multiple chunks.
 
 csv_file.close()
 
-print 'Done!'
+if not args.quiet:
+    print 'Done!'
+
diff --git a/gclogin.py b/gclogin.py
new file mode 100644
index 0000000..1600e42
--- /dev/null
+++ b/gclogin.py
@@ -0,0 +1,136 @@
+import requests
+import tempfile
+
+HTTP_SOURCE_ADDR = '0.0.0.0'
+class GarminLogin():
+    _garmin_signin_headers = {
+        "origin": "https://sso.garmin.com"
+    }
+
+    # Copied from https://github.com/moderation/garmin-connect-export
+    # Starting around 2/2021, all requests return 402. nk=NT fixes it.
+    # May 2021 it became necessary to supply a User-Agent.
+    _obligatory_headers = {
+        'nk': 'NT',
+        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.69 Safari/537.36',
+    }
+
+    # To do: pull in sessioncache from tapiriik, or omit if that's possible
+    # _sessionCache = SessionCache("garminconnect", lifetime=timedelta(minutes=120), freshen_on_get=True)
+
+    def _rate_limit(self):
+        import fcntl, struct, time
+        min_period = 1  # I appear to have been banned from Garmin Connect while determining this.
+        fcntl.flock(self._rate_lock, fcntl.LOCK_EX)
+        try:
+            self._rate_lock.seek(0)
+            last_req_start = self._rate_lock.read()
+            if not last_req_start:
+                last_req_start = 0
+            else:
+                last_req_start = float(last_req_start)
+
+            wait_time = max(0, min_period - (time.time() - last_req_start))
+            # print("_rate_limit: wait: '%s'; last_req_start: '%s'" % (wait_time, last_req_start))
+            time.sleep(wait_time)
+
+            self._rate_lock.seek(0)
+            self._rate_lock.write(str(time.time()))
+            self._rate_lock.flush()
+        finally:
+            fcntl.flock(self._rate_lock, fcntl.LOCK_UN)
+
+    def __init__(self):
+        rate_lock_path = tempfile.gettempdir() + "/gc_rate.%s.lock" % HTTP_SOURCE_ADDR
+        # print("rate_lock_path: '%s'" % rate_lock_path)
+        # Ensure the rate lock file exists (...the easy way)
+        open(rate_lock_path, "a").close()
+        self._rate_lock = open(rate_lock_path, "r+")
+
+    def _get_session(self, email, password):
+
+        session = requests.Session()
+        session.headers.update(self._obligatory_headers)
+
+        # JSIG CAS, cool I guess.
+        # Not quite OAuth though, so I'll continue to collect raw credentials.
+        # Commented stuff left in case this ever breaks because of missing parameters...
+        data = {
+            "username": email,
+            "password": password,
+            "_eventId": "submit",
+            "embed": "true",
+            # "displayNameRequired": "false"
+        }
+        params = {
+            "service": "https://connect.garmin.com/modern",
+            # "redirectAfterAccountLoginUrl": "http://connect.garmin.com/modern",
+            # "redirectAfterAccountCreationUrl": "http://connect.garmin.com/modern",
+            # "webhost": "olaxpw-connect00.garmin.com",
+            "clientId": "GarminConnect",
+            "gauthHost": "https://sso.garmin.com/sso",
+            # "rememberMeShown": "true",
+            # "rememberMeChecked": "false",
+            "consumeServiceTicket": "false",
+            # "id": "gauth-widget",
+            # "embedWidget": "false",
+            # "cssUrl": "https://static.garmincdn.com/com.garmin.connect/ui/src-css/gauth-custom.css",
+            # "source": "http://connect.garmin.com/en-US/signin",
+            # "createAccountShown": "true",
+            # "openCreateAccount": "false",
+            # "usernameShown": "true",
+            # "displayNameShown": "false",
+            # "initialFocus": "true",
+            # "locale": "en"
+        }
+
+        # I may never understand what motivates people to mangle a perfectly good protocol like HTTP in the ways they do...
+        preResp = session.get("https://sso.garmin.com/sso/signin", params=params)
+        if preResp.status_code != 200:
+            raise Exception("SSO prestart error %s %s" % (preResp.status_code, preResp.text))
+
+        ssoResp = session.post("https://sso.garmin.com/sso/signin", headers=self._garmin_signin_headers, params=params, data=data, allow_redirects=False)
+        if ssoResp.status_code != 200 or "temporarily unavailable" in ssoResp.text:
+            raise Exception("SSO error %s %s" % (ssoResp.status_code, ssoResp.text))
+
+        if ">sendEvent('FAIL')" in ssoResp.text:
+            raise Exception("Invalid login")
+        if ">sendEvent('ACCOUNT_LOCKED')" in ssoResp.text:
+            raise Exception("Account Locked")
+
+        if "renewPassword" in ssoResp.text:
+            raise Exception("Reset password")
+
+        # ...AND WE'RE NOT DONE YET!
+        self._rate_limit()
+
+        gcRedeemResp = session.get("https://connect.garmin.com/modern", allow_redirects=False)
+        if gcRedeemResp.status_code != 302:
+            raise Exception("GC redeem-start error %s %s" % (gcRedeemResp.status_code, gcRedeemResp.text))
+        url_prefix = "https://connect.garmin.com"
+        # There are 6 redirects that need to be followed to get the correct cookie
+        # ... :(
+        max_redirect_count = 7
+        current_redirect_count = 1
+        while True:
+            self._rate_limit()
+            url = gcRedeemResp.headers["location"]
+            # Fix up relative redirects.
+            if url.startswith("/"):
+                url = url_prefix + url
+            url_prefix = "/".join(url.split("/")[:3])
+            # print("url: '%s'" % url)
+            gcRedeemResp = session.get(url, allow_redirects=False)
+
+            if current_redirect_count >= max_redirect_count and gcRedeemResp.status_code != 200:
+                raise Exception("GC redeem %d/%d error %s %s" % (current_redirect_count, max_redirect_count, gcRedeemResp.status_code, gcRedeemResp.text))
+            if gcRedeemResp.status_code == 200 or gcRedeemResp.status_code == 404:
+                break
+            current_redirect_count += 1
+            if current_redirect_count > max_redirect_count:
+                break
+
+        # self._sessionCache.Set(record.ExternalID if record else email, session)
+
+        return session
+
diff --git a/properties.py b/properties.py
new file mode 100644
index 0000000..9e86699
--- /dev/null
+++ b/properties.py
@@ -0,0 +1,36 @@
+import requests
+
+class Properties():
+    """Properties: utility class that stores data from a URL in a dict. Values
+    in the dict are accessed by get(), which provides a default value.
+
+    Data from the URL are expected to be in string form, with multiple lines
+    in key=value format. (At some point strings became bytes. I store both
+    string and bytes values, for now.)
+
+    If key_trim_prefix is provided, its value is deleted from key names.
+    """
+    def __init__(self, session, url, key_trim_prefix = None):
+        self.properties = {}
+
+        http_data = session.get(url, allow_redirects=False)
+        for line in http_data.iter_lines():
+            (key, value) = line.split(b'=')
+            if (key_trim_prefix != None):
+                key = key.replace(key_trim_prefix, b'')
+            self.properties[key] = value
+            # key, value are bytes. record a parallel string value
+            self.properties[key.decode('utf-8')] = value.decode('utf-8')
+
+    # Get a value, default to key as value
+    def get(self, key):
+        try:
+            value = self.properties[key]
+        except KeyError:
+            value = key
+        return value
+
+    def do_print(self):
+        for key in self.properties:
+            print("  %s=%s" % (key, self.properties[key]))
+
diff --git a/typeinfo.py b/typeinfo.py
new file mode 100644
index 0000000..1f70692
--- /dev/null
+++ b/typeinfo.py
@@ -0,0 +1,36 @@
+import requests
+import json
+
+class TypeInfo():
+    """
+    TypeInfo: utility dict wrapper class
+    Looks up types in a url and an associated Properties instance
+    """
+    def __init__(self, session, url, props):
+        self.type_info = {}
+        http_data = session.get(url, allow_redirects=False)
+        if http_data.status_code != 200:
+            print("TypeInfo error code: %d" % (http_data.status_code))
+            self.type_info = None
+            return
+
+        types = json.loads(http_data.text)
+        key = 'typeKey'
+        for _type in types:
+            type_id = _type['typeId']
+            this_type = {}
+            this_type[key] = _type.get(key, "")
+            # Set type from typeKey
+            this_type['type'] = props.get(this_type[key])
+            self.type_info[type_id] = this_type
+
+    def do_print(self):
+        print("### type_info")
+        for _type in self.type_info:
+            print(_type)
+            for param in self.type_info[_type]:
+                print("  " + param + ": " + str(self.type_info[_type][param]))
+        print("###")
+
+    def __getitem__(self, key):
+        return self.type_info[key]
diff --git a/utils.py b/utils.py
new file mode 100644
index 0000000..b30e3ee
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,129 @@
+def csvFormat(value):
+    csv_record = '"' + str(value).replace('"', '""') + '",'
+    return csv_record
+
+# recursive dict get
+def dictFind(data, keys):
+    try:
+        for key in keys:
+            data = data[key]
+    except KeyError:
+        return ""
+    return data
+
+def activity_to_csv(results, a, devInfo, activity_type_info, event_type_info):
+    empty_record = '"",'
+    csv_record = ''
+    # Activity ID
+    activityId = str(a['activityId'])
+    csv_record += csvFormat(activityId)
+    # Activity Name
+    csv_record += csvFormat(dictFind(results, ['activityName', ]))
+    # Description
+    csv_record += csvFormat(dictFind(results, ['description', ]))
+    # Begin Timestamp
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ]))
+
+    # Begin Timestamp (Raw Milliseconds)
+    csv_record += empty_record
+
+    # End Timestamp
+    csv_record += empty_record
+
+    # End Timestamp (Raw Milliseconds)
+    csv_record += empty_record
+
+    # Device
+    deviceId = dictFind(a, ['deviceId', ])
+    csv_record += csvFormat(devInfo.displayName(deviceId))
+
+    # Activity Parent
+    parentTypeId = dictFind(a, ['activityType', 'parentTypeId',])
+    csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ]))
+    # Activity Type
+    typeId = dictFind(a, ['activityType', 'typeId',])
+    csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ]))
+
+    # Event Type
+    typeId = dictFind(a, ['eventType', 'typeId',])
+    csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ]))
+    # Activity Time Zone
+    csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ]))
+
+    # Max. Elevation
+    csv_record += empty_record
+    # Max. Elevation (Raw)
+    # (was in feet previously, now appears to be meters)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ]))
+
+    # {start, end} X {latitude, longitude}
+    # Begin Latitude (Decimal Degrees Raw)
+    # Begin Longitude (Decimal Degrees Raw)
+    # End Latitude (Decimal Degrees Raw)
+    # End Longitude (Decimal Degrees Raw)
+    for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']:
+        csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ]))
+
+    # Average Moving Speed
+    csv_record += empty_record
+
+    # Average Moving Speed (Raw)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ]))
+
+    # Max. Heart Rate (bpm)
+    csv_record += empty_record
+    # Average Heart Rate (bpm)
+    csv_record += empty_record
+
+    # Max. Speed
+    csv_record += empty_record
+    # Max. Speed (Raw)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ]))
+
+    # Calories
+    csv_record += empty_record
+    # Calories (Raw)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ]))
+
+    # Duration (h:m:s)
+    csv_record += empty_record
+    # Duration (Raw Seconds)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ]))
+    # Moving Duration (h:m:s)
+    csv_record += empty_record
+    # Moving Duration (Raw Seconds)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ]))
+    # Average Speed
+    csv_record += empty_record
+    # Average Speed (Raw)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ]))
+    # Distance
+    csv_record += empty_record
+    # distance.value
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ]))
+
+    # Max. Heart Rate (bpm)
+    csv_record += empty_record
+
+    # Min. Elevation
+    csv_record += empty_record
+    # Min. Elevation (Raw)
+    csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ]))
+
+    # Elevation Gain
+    csv_record += empty_record
+    # Elevation Gain (Raw)
+    csv_record += empty_record
+    # Elevation Loss
+    csv_record += empty_record
+    # Elevation Loss (Raw)
+    csv_record += empty_record
+
+    # remove any trailing commas - R read.csv doesn't like them.
+    csv_record = csv_record.rstrip(',')
+
+    csv_record += '\n'
+
+    return csv_record
+
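
Reviewer note, not part of the patch: a minimal sketch of how the new modules compose, following the same call sequence as gcdownload.py. The username, password, and the single-activity count below are placeholders; the URLs and class/function signatures are the ones introduced in this diff.

    #!/usr/bin/python3
    # Sketch only: log in, build the metadata helpers, fetch the newest activity,
    # and format it as a CSV record using the new utils.activity_to_csv helper.
    import json

    from gclogin import GarminLogin
    from deviceinfo import DeviceInfo
    from properties import Properties
    from typeinfo import TypeInfo
    from utils import activity_to_csv

    # Placeholder credentials; gcdownload.py prompts for these instead.
    session = GarminLogin()._get_session(email="user@example.com", password="secret")

    # Metadata used to decorate each CSV record.
    devInfo = DeviceInfo(session)
    activity_props = Properties(
        session,
        'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0',
        b'activity_type_')
    activity_type_info = TypeInfo(
        session,
        'https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes',
        activity_props)
    event_props = Properties(
        session,
        'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0')
    event_type_info = TypeInfo(
        session,
        'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes',
        event_props)

    # Fetch the most recent activity summary, then its detail record,
    # and print the resulting CSV line (assumes at least one activity exists).
    url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?'
    url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/'
    a = json.loads(session.get(url_gc_search, params={'start': 0, 'limit': 1}).text)[0]
    results = json.loads(session.get(url_gc_modern_activity + str(a['activityId'])).text)
    print(activity_to_csv(results, a, devInfo, activity_type_info, event_type_info))

The sketch mirrors the summary-plus-detail pattern the downloader uses: the search endpoint supplies activityId, deviceId and type ids, while the per-activity endpoint supplies the summaryDTO fields that end up in the CSV.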